[ 478.538082] env[61629]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61629) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 478.538421] env[61629]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61629) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 478.538556] env[61629]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61629) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 478.538859] env[61629]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 478.633673] env[61629]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61629) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 478.643474] env[61629]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=61629) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 479.243855] env[61629]: INFO nova.virt.driver [None req-9f8fdfa9-1707-4ff3-b4a2-57556492060c None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 479.313589] env[61629]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 479.313734] env[61629]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 479.313847] env[61629]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61629) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 482.473563] env[61629]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-16430454-bcc7-4e3a-8ee1-9f2973b5bfef {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 482.489307] env[61629]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61629) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 482.489468] env[61629]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-e0bc1db8-1a23-4101-b6ca-288343932f86 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 482.523413] env[61629]: INFO oslo_vmware.api [-] Successfully established new session; session ID is d4495.
[ 482.523592] env[61629]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.210s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 482.524090] env[61629]: INFO nova.virt.vmwareapi.driver [None req-9f8fdfa9-1707-4ff3-b4a2-57556492060c None None] VMware vCenter version: 7.0.3
[ 482.527438] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b2da63-5691-4595-a2f4-a576fe68a6c7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 482.544146] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d80d878a-34e6-4e97-86b5-e52a6928074a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 482.549689] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e79a41-c24f-4428-a8e0-ac28ba7503e7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 482.556151] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c16abb-27d4-439d-bcad-50a6b09a1830 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 482.568849] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4413b6-eb68-41ca-ad23-5bd79d0e4fef {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 482.574624] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b52f70-de94-4348-84cf-09444b6323ec {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 482.603839] env[61629]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-c8e391de-2c42-4015-9461-6f61097c8fce {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 482.609143] env[61629]: DEBUG nova.virt.vmwareapi.driver [None req-9f8fdfa9-1707-4ff3-b4a2-57556492060c None None] Extension org.openstack.compute already exists. {{(pid=61629) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:226}}
[ 482.611855] env[61629]: INFO nova.compute.provider_config [None req-9f8fdfa9-1707-4ff3-b4a2-57556492060c None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 483.115184] env[61629]: DEBUG nova.context [None req-9f8fdfa9-1707-4ff3-b4a2-57556492060c None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),8d3be7fd-1276-47c3-a3e5-b7bdd14ac4db(cell1) {{(pid=61629) load_cells /opt/stack/nova/nova/context.py:464}}
[ 483.117288] env[61629]: DEBUG oslo_concurrency.lockutils [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 483.117507] env[61629]: DEBUG oslo_concurrency.lockutils [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 483.118190] env[61629]: DEBUG oslo_concurrency.lockutils [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 483.118621] env[61629]: DEBUG oslo_concurrency.lockutils [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] Acquiring lock "8d3be7fd-1276-47c3-a3e5-b7bdd14ac4db" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 483.118803] env[61629]: DEBUG oslo_concurrency.lockutils [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] Lock "8d3be7fd-1276-47c3-a3e5-b7bdd14ac4db" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 483.119962] env[61629]: DEBUG oslo_concurrency.lockutils [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] Lock "8d3be7fd-1276-47c3-a3e5-b7bdd14ac4db" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 483.140519] env[61629]: INFO dbcounter [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] Registered counter for database nova_cell0
[ 483.149104] env[61629]: INFO dbcounter [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] Registered counter for database nova_cell1
[ 483.152579] env[61629]: DEBUG oslo_db.sqlalchemy.engines [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61629) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 483.152958] env[61629]: DEBUG oslo_db.sqlalchemy.engines [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61629) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 483.158141] env[61629]: ERROR nova.db.main.api [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 483.158141] env[61629]: result = function(*args, **kwargs)
[ 483.158141] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 483.158141] env[61629]: return func(*args, **kwargs)
[ 483.158141] env[61629]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 483.158141] env[61629]: result = fn(*args, **kwargs)
[ 483.158141] env[61629]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 483.158141] env[61629]: return f(*args, **kwargs)
[ 483.158141] env[61629]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 483.158141] env[61629]: return db.service_get_minimum_version(context, binaries)
[ 483.158141] env[61629]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 483.158141] env[61629]: _check_db_access()
[ 483.158141] env[61629]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 483.158141] env[61629]: stacktrace = ''.join(traceback.format_stack())
[ 483.158141] env[61629]:
[ 483.159178] env[61629]: ERROR nova.db.main.api [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 483.159178] env[61629]: result = function(*args, **kwargs)
[ 483.159178] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 483.159178] env[61629]: return func(*args, **kwargs)
[ 483.159178] env[61629]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 483.159178] env[61629]: result = fn(*args, **kwargs)
[ 483.159178] env[61629]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 483.159178] env[61629]: return f(*args, **kwargs)
[ 483.159178] env[61629]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 483.159178] env[61629]: return db.service_get_minimum_version(context, binaries)
[ 483.159178] env[61629]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 483.159178] env[61629]: _check_db_access()
[ 483.159178] env[61629]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 483.159178] env[61629]: stacktrace = ''.join(traceback.format_stack())
[ 483.159178] env[61629]:
[ 483.159571] env[61629]: WARNING nova.objects.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 483.159706] env[61629]: WARNING nova.objects.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] Failed to get minimum service version for cell 8d3be7fd-1276-47c3-a3e5-b7bdd14ac4db
[ 483.160151] env[61629]: DEBUG oslo_concurrency.lockutils [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] Acquiring lock "singleton_lock" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 483.160316] env[61629]: DEBUG oslo_concurrency.lockutils [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] Acquired lock "singleton_lock" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 483.160584] env[61629]: DEBUG oslo_concurrency.lockutils [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] Releasing lock "singleton_lock" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 483.160908] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] Full set of CONF: {{(pid=61629) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 483.161064] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ******************************************************************************** {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}}
[ 483.161195] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] Configuration options gathered from: {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}}
[ 483.161330] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}}
[ 483.161656] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}}
[ 483.161795] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ================================================================================ {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}}
[ 483.162015] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] allow_resize_to_same_host = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.162189] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] arq_binding_timeout = 300 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.162319] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] backdoor_port = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.162444] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] backdoor_socket = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.162603] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] block_device_allocate_retries = 60 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.162762] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] block_device_allocate_retries_interval = 3 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.162929] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cert = self.pem {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.163106] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.163273] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] compute_monitors = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.163436] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] config_dir = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.163602] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] config_drive_format = iso9660 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.163734] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.163895] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] config_source = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.164074] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] console_host = devstack {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.164239] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] control_exchange = nova {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.164391] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cpu_allocation_ratio = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.164571] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] daemon = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.164752] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] debug = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.164911] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] default_access_ip_network_name = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.165094] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] default_availability_zone = nova {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.165257] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] default_ephemeral_format = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.165414] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] default_green_pool_size = 1000 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.165646] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.165814] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] default_schedule_zone = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.165969] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] disk_allocation_ratio = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.166142] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] enable_new_services = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.166318] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] enabled_apis = ['osapi_compute'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.166481] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] enabled_ssl_apis = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.166638] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] flat_injected = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.166797] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] force_config_drive = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.166943] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] force_raw_images = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.167119] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] graceful_shutdown_timeout = 5 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.167279] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] heal_instance_info_cache_interval = 60 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.167485] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] host = cpu-1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.167694] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.167869] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] initial_disk_allocation_ratio = 1.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.168040] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] initial_ram_allocation_ratio = 1.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.168258] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.168423] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] instance_build_timeout = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.168580] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] instance_delete_interval = 300 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.168745] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] instance_format = [instance: %(uuid)s] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.168908] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] instance_name_template = instance-%08x {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.169077] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] instance_usage_audit = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.169246] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] instance_usage_audit_period = month {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.169405] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.169572] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] instances_path = /opt/stack/data/nova/instances {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.169737] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] internal_service_availability_zone = internal {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.169895] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] key = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.170073] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] live_migration_retry_count = 30 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.170244] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] log_color = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.170408] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] log_config_append = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.170623] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.170801] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] log_dir = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.170962] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] log_file = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.171102] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] log_options = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.171263] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] log_rotate_interval = 1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.171430] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] log_rotate_interval_type = days {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.171703] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] log_rotation_type = none {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.171847] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.171976] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.172164] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.172331] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.172456] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.172622] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] long_rpc_timeout = 1800 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.172784] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] max_concurrent_builds = 10 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.172937] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] max_concurrent_live_migrations = 1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.173106] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] max_concurrent_snapshots = 5 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.173265] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] max_local_block_devices = 3 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.173420] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] max_logfile_count = 30 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.173577] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] max_logfile_size_mb = 200 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.173809] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] maximum_instance_delete_attempts = 5 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.173998] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] metadata_listen = 0.0.0.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.174184] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] metadata_listen_port = 8775 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.174355] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] metadata_workers = 2 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.174515] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] migrate_max_retries = -1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.174684] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] mkisofs_cmd = genisoimage {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.174890] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] my_block_storage_ip = 10.180.1.21 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.175032] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] my_ip = 10.180.1.21 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.175197] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] network_allocate_retries = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.175372] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.175546] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] osapi_compute_listen = 0.0.0.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.175692] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] osapi_compute_listen_port = 8774 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.175856] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] osapi_compute_unique_server_name_scope = {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.176031] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] osapi_compute_workers = 2 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.176196] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] password_length = 12 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.176352] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] periodic_enable = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.176505] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] periodic_fuzzy_delay = 60 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.176694] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] pointer_model = usbtablet {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.176876] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] preallocate_images = none {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.177047] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] publish_errors = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.177180] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] pybasedir = /opt/stack/nova {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.177336] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ram_allocation_ratio = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.177496] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] rate_limit_burst = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.177669] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] rate_limit_except_level = CRITICAL {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.177823] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] rate_limit_interval = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.177978] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] reboot_timeout = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.178146] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] reclaim_instance_interval = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.178301] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] record = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.178467] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] reimage_timeout_per_gb = 60 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.178630] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] report_interval = 120 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.178788] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] rescue_timeout = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.178943] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] reserved_host_cpus = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.179107] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] reserved_host_disk_mb = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.179265] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] reserved_host_memory_mb = 512 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.179422] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] reserved_huge_pages = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.179587] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] resize_confirm_window = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.179787] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] resize_fs_using_block_device = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.179953] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] resume_guests_state_on_host_boot = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.180137] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.180298] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] rpc_response_timeout = 60 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.180471] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] run_external_periodic_tasks = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.180654] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] running_deleted_instance_action = reap {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.180820] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] running_deleted_instance_poll_interval = 1800 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.180979] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] running_deleted_instance_timeout = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.181151] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] scheduler_instance_sync_interval = 120 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.181318] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] service_down_time = 720 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.181487] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] servicegroup_driver = db {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.181735] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] shell_completion = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.181921] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] shelved_offload_time = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.182093] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] shelved_poll_interval = 3600 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.182262] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] shutdown_timeout = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.182423] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] source_is_ipv6 = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.182578] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ssl_only = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.182841] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.183015] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] sync_power_state_interval = 600 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.183184] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] sync_power_state_pool_size = 1000 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.183351] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] syslog_log_facility = LOG_USER {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.183508] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] tempdir = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.183669] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] timeout_nbd = 10 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.183836] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] transport_url = **** {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.183995] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] update_resources_interval = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.184169] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] use_cow_images = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.184327] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] use_eventlog = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.184484] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] use_journal = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.184639] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] use_json = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.184793] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] use_rootwrap_daemon = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.184945] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] use_stderr = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.185115] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] use_syslog = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.185272] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vcpu_pin_set = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.185434] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vif_plugging_is_fatal = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.185597] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vif_plugging_timeout = 300 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.185758] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] virt_mkfs = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.185918] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] volume_usage_poll_interval = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.186090] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] watch_log_file = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.186260] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] web = /usr/share/spice-html5 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 483.186444] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.186609] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.186771] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.186937] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_concurrency.disable_process_locking = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.187487] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.187685] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.187858] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.188044] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.188220] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.188386] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.188579] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.auth_strategy = keystone {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.188762] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.compute_link_prefix = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.188940] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.189132] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.dhcp_domain = novalocal {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.189306] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.enable_instance_password = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.189474] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.glance_link_prefix = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.189641] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.189811] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.189974] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.instance_list_per_project_cells = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.190149] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.list_records_by_skipping_down_cells = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.190308] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.local_metadata_per_cell = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.190492] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.max_limit = 1000 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.190675] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.metadata_cache_expiration = 15 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.190850] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.neutron_default_tenant_id = default {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.191057] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.response_validation = warn {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.191248] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.use_neutron_default_nets = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.191420] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.191599] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 483.191861] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.192069] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.192247] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.vendordata_dynamic_targets = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.192414] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.vendordata_jsonfile_path = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.192596] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.192792] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.backend = dogpile.cache.memcached {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.192959] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.backend_argument = **** {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.193147] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.config_prefix = cache.oslo {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.193315] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.dead_timeout = 60.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.193478] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.debug_cache_backend = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.193639] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.enable_retry_client = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.193799] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.enable_socket_keepalive = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.193966] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.enabled = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.194142] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.enforce_fips_mode = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.194304] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.expiration_time = 600 {{(pid=61629) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.194465] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.hashclient_retry_attempts = 2 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.194630] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.hashclient_retry_delay = 1.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.194789] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.memcache_dead_retry = 300 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.194945] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.memcache_password = **** {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.195120] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.195283] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.195443] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.memcache_pool_maxsize = 10 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.195601] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.195762] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.memcache_sasl_enabled = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.195938] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.196118] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.memcache_socket_timeout = 1.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.196281] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.memcache_username = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.196443] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.proxies = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.196604] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.redis_db = 0 {{(pid=61629) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.196763] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.redis_password = **** {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.196933] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.redis_sentinel_service_name = mymaster {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.197121] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.197293] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.redis_server = localhost:6379 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.197456] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.redis_socket_timeout = 1.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.197615] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.redis_username = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.197778] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.retry_attempts = 2 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.197942] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.retry_delay = 0.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.198113] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.socket_keepalive_count = 1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.198274] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.socket_keepalive_idle = 1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.198433] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.socket_keepalive_interval = 1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.198591] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.tls_allowed_ciphers = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.198749] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.tls_cafile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.198905] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.tls_certfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
483.199095] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.tls_enabled = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.199262] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cache.tls_keyfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.199433] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cinder.auth_section = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.199604] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cinder.auth_type = password {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.199768] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cinder.cafile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.199944] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cinder.catalog_info = volumev3::publicURL {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.200117] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cinder.certfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.200281] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cinder.collect_timing = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.200442] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cinder.cross_az_attach = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.200633] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cinder.debug = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.200803] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cinder.endpoint_template = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.200970] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cinder.http_retries = 3 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.201147] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cinder.insecure = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.201308] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cinder.keyfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.201504] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cinder.os_region_name = RegionOne 
{{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.201697] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cinder.split_loggers = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.201938] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cinder.timeout = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.202143] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.202317] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] compute.cpu_dedicated_set = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.202478] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] compute.cpu_shared_set = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.202648] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] compute.image_type_exclude_list = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.202811] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.202972] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] compute.max_concurrent_disk_ops = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.203147] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] compute.max_disk_devices_to_attach = -1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.203308] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.203480] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.203642] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] compute.resource_provider_association_refresh = 300 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.203799] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.203957] env[61629]: DEBUG oslo_service.service [None 
req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] compute.shutdown_retry_interval = 10 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.204158] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.204337] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] conductor.workers = 2 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.204516] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] console.allowed_origins = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.204680] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] console.ssl_ciphers = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.204850] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] console.ssl_minimum_version = default {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.205029] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] consoleauth.enforce_session_timeout = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.205202] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] consoleauth.token_ttl = 600 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.205376] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cyborg.cafile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.205534] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cyborg.certfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.205697] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cyborg.collect_timing = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.205855] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cyborg.connect_retries = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.206017] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cyborg.connect_retry_delay = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.206178] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cyborg.endpoint_override = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.206338] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] 
cyborg.insecure = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.206495] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cyborg.keyfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.206654] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cyborg.max_version = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.206807] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cyborg.min_version = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.206964] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cyborg.region_name = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.207134] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cyborg.retriable_status_codes = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.207291] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cyborg.service_name = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.207459] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cyborg.service_type = accelerator {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.207620] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cyborg.split_loggers = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.207778] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cyborg.status_code_retries = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.207934] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cyborg.status_code_retry_delay = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.208104] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cyborg.timeout = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.208283] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.208442] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] cyborg.version = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.208621] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] database.backend = sqlalchemy {{(pid=61629) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.208791] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] database.connection = **** {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.208954] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] database.connection_debug = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.209137] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] database.connection_parameters = {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.209302] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] database.connection_recycle_time = 3600 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.209464] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] database.connection_trace = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.209623] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] database.db_inc_retry_interval = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.209783] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] database.db_max_retries = 20 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.209941] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] database.db_max_retry_interval = 10 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.210113] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] database.db_retry_interval = 1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.210272] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] database.max_overflow = 50 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.210429] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] database.max_pool_size = 5 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.210625] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] database.max_retries = 10 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.210805] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.210966] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] database.mysql_wsrep_sync_wait = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} 
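The dotted names in this dump (for example cache.memcache_servers, database.max_pool_size, glance.service_type) are oslo.config options grouped by configuration section, and the values shown as **** are options registered with secret=True, which oslo.config masks when logging. As a minimal, hypothetical sketch of how such a dump is produced — the group and option names below are illustrative stand-ins, not the actual Nova registrations — something like the following reproduces the pattern:

import logging

from oslo_config import cfg

CONF = cfg.CONF

# Illustrative registrations only; the real options are registered by Nova
# and its libraries. secret=True is what makes a value appear as '****'.
CONF.register_group(cfg.OptGroup(name='cache'))
CONF.register_opts(
    [
        cfg.ListOpt('memcache_servers', default=['localhost:11211']),
        cfg.IntOpt('expiration_time', default=600),
        cfg.StrOpt('memcache_password', secret=True),
    ],
    group='cache',
)

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

CONF([], project='example')              # parse an (empty) command line and any config files
CONF.log_opt_values(LOG, logging.DEBUG)  # emits "cache.memcache_servers = [...]" style lines
print(CONF.cache.expiration_time)        # dotted attribute access mirrors the logged names

In the service's own configuration file these dotted prefixes correspond to INI sections (the cache.* values above would come from a [cache] section, database.* from [database], and so on), which is standard oslo.config behaviour rather than anything specific to this deployment.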
[ 483.211136] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] database.pool_timeout = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.211297] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] database.retry_interval = 10 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.211460] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] database.slave_connection = **** {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.211658] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] database.sqlite_synchronous = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.211833] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] database.use_db_reconnect = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.212105] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api_database.backend = sqlalchemy {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.212290] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api_database.connection = **** {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.212489] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api_database.connection_debug = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.212663] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api_database.connection_parameters = {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.212839] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api_database.connection_recycle_time = 3600 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.213026] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api_database.connection_trace = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.213186] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api_database.db_inc_retry_interval = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.213347] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api_database.db_max_retries = 20 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.213506] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api_database.db_max_retry_interval = 10 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.213668] env[61629]: DEBUG 
oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api_database.db_retry_interval = 1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.213825] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api_database.max_overflow = 50 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.213982] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api_database.max_pool_size = 5 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.214156] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api_database.max_retries = 10 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.214327] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.214484] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.214642] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api_database.pool_timeout = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.214800] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api_database.retry_interval = 10 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.214959] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api_database.slave_connection = **** {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.215138] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] api_database.sqlite_synchronous = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.215314] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] devices.enabled_mdev_types = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.215491] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.215663] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ephemeral_storage_encryption.default_format = luks {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.215823] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ephemeral_storage_encryption.enabled = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.215984] env[61629]: 
DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.216171] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.api_servers = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.216335] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.cafile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.216496] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.certfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.216658] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.collect_timing = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.216814] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.connect_retries = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.216971] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.connect_retry_delay = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.217144] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.debug = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.217313] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.default_trusted_certificate_ids = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.217473] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.enable_certificate_validation = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.217636] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.enable_rbd_download = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.217822] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.endpoint_override = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.218050] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.insecure = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.218229] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.keyfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.218389] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] 
glance.max_version = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.218546] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.min_version = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.218708] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.num_retries = 3 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.218875] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.rbd_ceph_conf = {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.219048] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.rbd_connect_timeout = 5 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.219222] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.rbd_pool = {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.219390] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.rbd_user = {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.219551] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.region_name = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.219711] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.retriable_status_codes = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.219867] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.service_name = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.220044] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.service_type = image {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.220209] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.split_loggers = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.220374] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.status_code_retries = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.220574] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.status_code_retry_delay = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.220736] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.timeout = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.220920] 
env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.221098] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.verify_glance_signatures = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.221262] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] glance.version = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.221428] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] guestfs.debug = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.221613] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] mks.enabled = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.221977] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.222265] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] image_cache.manager_interval = 2400 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.222444] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] image_cache.precache_concurrency = 1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.222616] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] image_cache.remove_unused_base_images = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.222794] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.222955] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.224132] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] image_cache.subdirectory_name = _base {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.224132] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.api_max_retries = 60 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.224132] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.api_retry_interval = 2 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
483.224132] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.auth_section = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.224132] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.auth_type = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.224132] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.cafile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.224132] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.certfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.224364] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.collect_timing = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.224392] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.conductor_group = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.224553] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.connect_retries = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.224711] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.connect_retry_delay = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.224876] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.endpoint_override = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.225050] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.insecure = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.225210] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.keyfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.225367] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.max_version = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.225526] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.min_version = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.225689] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.peer_list = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.225845] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.region_name = None {{(pid=61629) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.226014] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.retriable_status_codes = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.226180] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.serial_console_state_timeout = 10 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.226339] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.service_name = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.226507] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.service_type = baremetal {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230014] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.shard = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230014] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.split_loggers = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230014] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.status_code_retries = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230014] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.status_code_retry_delay = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230014] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.timeout = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230014] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230014] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ironic.version = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230242] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230242] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] key_manager.fixed_key = **** {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230242] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61629) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230242] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican.barbican_api_version = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230242] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican.barbican_endpoint = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230242] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican.barbican_endpoint_type = public {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230242] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican.barbican_region_name = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230424] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican.cafile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230424] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican.certfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230424] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican.collect_timing = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230424] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican.insecure = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230424] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican.keyfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230424] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican.number_of_retries = 60 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230424] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican.retry_delay = 1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230618] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican.send_service_user_token = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230618] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican.split_loggers = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230618] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican.timeout = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230747] 
env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican.verify_ssl = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.230871] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican.verify_ssl_path = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.231042] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican_service_user.auth_section = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.231202] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican_service_user.auth_type = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.231357] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican_service_user.cafile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.231527] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican_service_user.certfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.231719] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican_service_user.collect_timing = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.231883] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican_service_user.insecure = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.232049] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican_service_user.keyfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.232298] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican_service_user.split_loggers = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.232468] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] barbican_service_user.timeout = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.232639] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vault.approle_role_id = **** {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.232800] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vault.approle_secret_id = **** {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.232964] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vault.kv_mountpoint = secret {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.233134] env[61629]: DEBUG 
oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vault.kv_path = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.233296] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vault.kv_version = 2 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.233452] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vault.namespace = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.233606] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vault.root_token_id = **** {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.233758] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vault.ssl_ca_crt_file = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.233921] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vault.timeout = 60.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.234090] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vault.use_ssl = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.234255] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.234423] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] keystone.auth_section = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.234582] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] keystone.auth_type = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.234737] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] keystone.cafile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.234889] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] keystone.certfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.235059] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] keystone.collect_timing = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.235232] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] keystone.connect_retries = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.235401] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] keystone.connect_retry_delay = None {{(pid=61629) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.235613] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] keystone.endpoint_override = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.235800] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] keystone.insecure = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.235991] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] keystone.keyfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.236172] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] keystone.max_version = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.236331] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] keystone.min_version = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.236493] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] keystone.region_name = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.236653] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] keystone.retriable_status_codes = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.236813] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] keystone.service_name = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.236982] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] keystone.service_type = identity {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.237160] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] keystone.split_loggers = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.237317] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] keystone.status_code_retries = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.237474] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] keystone.status_code_retry_delay = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.237633] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] keystone.timeout = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.237810] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
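Each group in this dump corresponds to an INI section of the service's configuration file (here /etc/nova/nova.conf), with the option name as the key; the dump alone does not show whether a value was set explicitly or is the option's default. A rough illustration of that mapping, using a few values from the libvirt group that follows:

import configparser

# Illustrative nova.conf fragment only; whether these values were set
# explicitly in nova.conf or merely defaulted is not visible from the log.
NOVA_CONF_SNIPPET = """
[libvirt]
virt_type = kvm
images_type = default
rng_dev_path = /dev/urandom
live_migration_downtime = 500
"""

cp = configparser.ConfigParser()
cp.read_string(NOVA_CONF_SNIPPET)
print(cp.get('libvirt', 'virt_type'))                   # kvm
print(cp.getint('libvirt', 'live_migration_downtime'))  # 500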
483.237969] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] keystone.version = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.238187] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.connection_uri = {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.238350] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.cpu_mode = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.238516] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.cpu_model_extra_flags = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.238685] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.cpu_models = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.238855] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.cpu_power_governor_high = performance {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.239032] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.cpu_power_governor_low = powersave {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.239203] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.cpu_power_management = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.239387] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.239611] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.device_detach_attempts = 8 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.239787] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.device_detach_timeout = 20 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.239956] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.disk_cachemodes = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.240132] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.disk_prefix = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.240299] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.enabled_perf_events = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.240469] env[61629]: DEBUG oslo_service.service [None 
req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.file_backed_memory = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.240653] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.gid_maps = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.240822] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.hw_disk_discard = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.240979] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.hw_machine_type = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.241167] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.images_rbd_ceph_conf = {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.241336] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.241501] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.241691] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.images_rbd_glance_store_name = {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.241867] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.images_rbd_pool = rbd {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.242048] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.images_type = default {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.242282] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.images_volume_group = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.242471] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.inject_key = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.242640] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.inject_partition = -2 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.242803] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.inject_password = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.242963] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] 
libvirt.iscsi_iface = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.243137] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.iser_use_multipath = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.243302] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.live_migration_bandwidth = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.243464] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.243625] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.live_migration_downtime = 500 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.243786] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.243944] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.244118] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.live_migration_inbound_addr = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.244282] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.244448] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.live_migration_permit_post_copy = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.244612] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.live_migration_scheme = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.244796] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.live_migration_timeout_action = abort {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.244962] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.live_migration_tunnelled = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.245136] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.live_migration_uri = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.245298] env[61629]: DEBUG oslo_service.service [None 
req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.live_migration_with_native_tls = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.245455] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.max_queues = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.245616] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.245862] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.246037] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.nfs_mount_options = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.246339] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.246514] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.246680] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.num_iser_scan_tries = 5 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.246840] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.num_memory_encrypted_guests = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.247007] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.247179] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.num_pcie_ports = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.247345] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.num_volume_scan_tries = 5 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.247510] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.pmem_namespaces = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.247670] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.quobyte_client_cfg = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.247965] env[61629]: DEBUG oslo_service.service [None 
req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.248154] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.rbd_connect_timeout = 5 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.248321] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.248483] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.248645] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.rbd_secret_uuid = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.248802] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.rbd_user = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.248961] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.249146] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.remote_filesystem_transport = ssh {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.249305] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.rescue_image_id = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.249465] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.rescue_kernel_id = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.249626] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.rescue_ramdisk_id = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.249793] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.249950] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.rx_queue_size = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.250131] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.smbfs_mount_options = {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.250414] env[61629]: DEBUG oslo_service.service [None 
req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.250628] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.snapshot_compression = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.250822] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.snapshot_image_format = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.251066] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.251240] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.sparse_logical_volumes = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.251406] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.swtpm_enabled = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.251606] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.swtpm_group = tss {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.251834] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.swtpm_user = tss {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.252038] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.sysinfo_serial = unique {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.252208] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.tb_cache_size = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.252449] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.tx_queue_size = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.252634] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.uid_maps = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.252836] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.use_virtio_for_bridges = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.253048] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.virt_type = kvm {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.253232] env[61629]: DEBUG oslo_service.service [None 
req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.volume_clear = zero {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.253399] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.volume_clear_size = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.253568] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.volume_use_multipath = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.253731] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.vzstorage_cache_path = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.253934] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.254132] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.vzstorage_mount_group = qemu {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.254304] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.vzstorage_mount_opts = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.254478] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.254759] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.254935] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.vzstorage_mount_user = stack {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.255115] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.255293] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.auth_section = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.255469] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.auth_type = password {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.255631] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.cafile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.255807] env[61629]: DEBUG oslo_service.service 
[None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.certfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.255984] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.collect_timing = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.256159] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.connect_retries = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.256318] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.connect_retry_delay = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.256490] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.default_floating_pool = public {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.256651] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.endpoint_override = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.256854] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.extension_sync_interval = 600 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.257062] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.http_retries = 3 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.257235] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.insecure = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.257398] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.keyfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.257560] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.max_version = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.257734] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.257944] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.min_version = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.258184] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.ovs_bridge = br-int {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.258364] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.physnets = [] {{(pid=61629) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.258537] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.region_name = RegionOne {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.258701] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.retriable_status_codes = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.258886] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.service_metadata_proxy = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.259083] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.service_name = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.259262] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.service_type = network {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.259426] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.split_loggers = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.259586] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.status_code_retries = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.259746] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.status_code_retry_delay = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.259903] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.timeout = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.260096] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.260262] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] neutron.version = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.260436] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] notifications.bdms_in_notifications = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.260645] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] notifications.default_level = INFO {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.260827] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] notifications.notification_format = unversioned {{(pid=61629) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.260996] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] notifications.notify_on_state_change = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.261189] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.261366] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] pci.alias = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.261539] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] pci.device_spec = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.261704] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] pci.report_in_placement = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.261875] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.auth_section = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.262060] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.auth_type = password {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.262233] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.262464] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.cafile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.262676] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.certfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.262857] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.collect_timing = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.263029] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.connect_retries = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.263194] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.connect_retry_delay = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.263354] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.default_domain_id = None {{(pid=61629) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.263513] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.default_domain_name = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.263669] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.domain_id = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.263828] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.domain_name = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.263986] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.endpoint_override = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.264162] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.insecure = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.264322] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.keyfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.264479] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.max_version = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.264636] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.min_version = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.264804] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.password = **** {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.264964] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.project_domain_id = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.265147] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.project_domain_name = Default {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.265318] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.project_id = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.265492] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.project_name = service {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.265665] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.region_name = RegionOne {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.265825] 
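The placement.* options in this part of the dump (auth_type = password, auth_url = http://10.180.1.21/identity, username = nova, project_name = service, the Default domains, region_name = RegionOne) are keystoneauth1 credentials used to reach the Placement API; the password itself is masked as ****. A rough sketch of the kind of Keystone session these values describe, with a placeholder password since the real one is not recoverable from the log:

from keystoneauth1 import session
from keystoneauth1.identity import v3

# Values taken from the placement.* options above; the password is a
# placeholder because the log masks it as ****.
auth = v3.Password(
    auth_url='http://10.180.1.21/identity',
    username='nova',
    password='REPLACE_ME',
    project_name='service',
    user_domain_name='Default',
    project_domain_name='Default',
)
sess = session.Session(auth=auth)
# Example request against the placement service in RegionOne:
# sess.get('/resource_providers', endpoint_filter={
#     'service_type': 'placement', 'interface': 'internal',
#     'region_name': 'RegionOne'})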
env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.retriable_status_codes = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.265984] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.service_name = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.266168] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.service_type = placement {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.266331] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.split_loggers = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.266490] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.status_code_retries = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.266658] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.status_code_retry_delay = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.266815] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.system_scope = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.266971] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.timeout = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.267140] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.trust_id = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.267299] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.user_domain_id = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.267466] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.user_domain_name = Default {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.267626] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.user_id = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.267796] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.username = nova {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.267977] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.268153] env[61629]: DEBUG oslo_service.service [None 
req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] placement.version = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.268329] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] quota.cores = 20 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.268508] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] quota.count_usage_from_placement = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.268698] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.268875] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] quota.injected_file_content_bytes = 10240 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.269052] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] quota.injected_file_path_length = 255 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.269222] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] quota.injected_files = 5 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.269386] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] quota.instances = 10 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.269549] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] quota.key_pairs = 100 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.269715] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] quota.metadata_items = 128 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.269878] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] quota.ram = 51200 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.270051] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] quota.recheck_quota = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.270221] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] quota.server_group_members = 10 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.270385] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] quota.server_groups = 10 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.270588] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61629) log_opt_values 
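The quota.* values dumped just above (instances = 10, cores = 20, ram = 51200, driver = nova.quota.DbQuotaDriver) bound what a single project may consume. As plain arithmetic, not Nova's actual quota driver: five servers with 4 vCPUs and 8192 MB each fit (20 cores, 40960 MB), while six such servers would exceed the 20-core limit:

# Illustrative check only; Nova enforces these limits through
# nova.quota.DbQuotaDriver, not this function.
QUOTA = {'instances': 10, 'cores': 20, 'ram': 51200}

def fits(n_servers, vcpus_each, ram_mb_each):
    return (n_servers <= QUOTA['instances']
            and n_servers * vcpus_each <= QUOTA['cores']
            and n_servers * ram_mb_each <= QUOTA['ram'])

print(fits(5, 4, 8192))  # True  (20 cores, 40960 MB)
print(fits(6, 4, 8192))  # False (24 cores > quota.cores)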
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.270760] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.270925] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] scheduler.image_metadata_prefilter = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.271099] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.271265] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] scheduler.max_attempts = 3 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.271426] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] scheduler.max_placement_results = 1000 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.271591] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.271753] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] scheduler.query_placement_for_image_type_support = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.271912] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.272097] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] scheduler.workers = 2 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.272271] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.272445] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.272713] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.272914] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.273105] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.273278] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.273443] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.273639] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.273809] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.host_subset_size = 1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.273973] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.274147] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.274311] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.274473] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.isolated_hosts = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.274664] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.isolated_images = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.274844] env[61629]: DEBUG oslo_service.service [None 
req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.275013] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.275183] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.275346] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.pci_in_placement = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.275505] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.275666] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.275824] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.275984] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.276161] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.276322] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.276481] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.track_instance_changes = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.276658] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.276829] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] metrics.required = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.276993] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] metrics.weight_multiplier = 1.0 
{{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.277171] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.277336] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] metrics.weight_setting = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.277652] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.277829] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] serial_console.enabled = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.278015] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] serial_console.port_range = 10000:20000 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.278193] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.278362] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.278540] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] serial_console.serialproxy_port = 6083 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.278720] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] service_user.auth_section = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.278893] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] service_user.auth_type = password {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.279064] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] service_user.cafile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.279225] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] service_user.certfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.279386] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] service_user.collect_timing = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.279546] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] service_user.insecure = False {{(pid=61629) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.279705] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] service_user.keyfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.279872] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] service_user.send_service_user_token = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.280043] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] service_user.split_loggers = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.280218] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] service_user.timeout = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.280387] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] spice.agent_enabled = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.280573] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] spice.enabled = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.280892] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.281097] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.281273] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] spice.html5proxy_port = 6082 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.281435] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] spice.image_compression = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.281593] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] spice.jpeg_compression = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.281750] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] spice.playback_compression = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.281910] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] spice.require_secure = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.282088] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] spice.server_listen = 127.0.0.1 {{(pid=61629) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.282260] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.282418] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] spice.streaming_mode = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.282640] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] spice.zlib_compression = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.282837] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] upgrade_levels.baseapi = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.283023] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] upgrade_levels.compute = auto {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.283192] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] upgrade_levels.conductor = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.283353] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] upgrade_levels.scheduler = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.283517] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vendordata_dynamic_auth.auth_section = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.283681] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vendordata_dynamic_auth.auth_type = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.283839] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vendordata_dynamic_auth.cafile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.283995] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vendordata_dynamic_auth.certfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.284174] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.284334] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vendordata_dynamic_auth.insecure = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.284491] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vendordata_dynamic_auth.keyfile = None {{(pid=61629) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.284654] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.284809] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vendordata_dynamic_auth.timeout = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.284981] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.api_retry_count = 10 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.285158] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.ca_file = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.285332] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.cache_prefix = devstack-image-cache {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.285499] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.cluster_name = testcl1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.285661] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.connection_pool_size = 10 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.285819] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.console_delay_seconds = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.285985] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.datastore_regex = ^datastore.* {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.286205] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.286380] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.host_password = **** {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.286544] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.host_port = 443 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.286737] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.host_username = administrator@vsphere.local {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.286919] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.insecure = True {{(pid=61629) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.287096] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.integration_bridge = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.287265] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.maximum_objects = 100 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.287427] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.pbm_default_policy = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.287589] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.pbm_enabled = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.287752] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.pbm_wsdl_location = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.287920] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.288089] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.serial_port_proxy_uri = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.288250] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.serial_port_service_uri = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.288415] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.task_poll_interval = 0.5 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.288596] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.use_linked_clone = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.288775] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.vnc_keymap = en-us {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.288941] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.vnc_port = 5900 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.289117] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vmware.vnc_port_total = 10000 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.289306] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vnc.auth_schemes = ['none'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.289480] 
env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vnc.enabled = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.289777] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.289964] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.290141] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vnc.novncproxy_port = 6080 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.290315] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vnc.server_listen = 127.0.0.1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.290499] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.290675] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vnc.vencrypt_ca_certs = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.290834] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vnc.vencrypt_client_cert = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.290989] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vnc.vencrypt_client_key = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.291181] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.291344] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] workarounds.disable_deep_image_inspection = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.291504] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.291662] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.291816] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61629) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.291971] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] workarounds.disable_rootwrap = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.292140] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] workarounds.enable_numa_live_migration = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.292297] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.292453] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.292609] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.292850] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] workarounds.libvirt_disable_apic = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.293029] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.293201] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.293379] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.293519] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.293683] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.293837] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.293995] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.294167] 
env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.294326] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.294487] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.294666] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.294831] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] wsgi.client_socket_timeout = 900 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.294992] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] wsgi.default_pool_size = 1000 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.295169] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] wsgi.keep_alive = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.295335] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] wsgi.max_header_line = 16384 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.295496] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] wsgi.secure_proxy_ssl_header = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.295658] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] wsgi.ssl_ca_file = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.295814] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] wsgi.ssl_cert_file = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.295970] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] wsgi.ssl_key_file = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.296147] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] wsgi.tcp_keepidle = 600 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.296325] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61629) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.296491] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] zvm.ca_file = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.296651] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] zvm.cloud_connector_url = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.296938] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.297124] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] zvm.reachable_timeout = 300 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.297306] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_policy.enforce_new_defaults = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.297694] env[61629]: WARNING oslo_config.cfg [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
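Every DEBUG line above is produced by oslo.config's log_opt_values(), which oslo_service calls at startup to dump the effective value of every registered option, and the WARNING is oslo.config flagging an option declared deprecated-for-removal. The following is a minimal, self-contained sketch of that pattern, not Nova's actual startup code: the deprecated_reason text and the empty argument list are illustrative assumptions, and only the two [oslo_policy] options named in this log are registered.

import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

CONF = cfg.CONF
CONF.register_opts(
    [
        cfg.BoolOpt('enforce_new_defaults', default=False),
        # Options declared deprecated_for_removal cause oslo.config to log a
        # "Deprecated: Option ... is deprecated for removal" warning like the
        # one above, typically when the option is set in the loaded config.
        cfg.BoolOpt('enforce_scope',
                    default=False,
                    deprecated_for_removal=True,
                    deprecated_reason='Scope checks will always be enforced.'),
    ],
    group='oslo_policy',
)

CONF([], project='nova')                 # parse; no config files passed here
CONF.log_opt_values(LOG, logging.DEBUG)  # one DEBUG line per registered option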
[ 483.297879] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_policy.enforce_scope = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.298070] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_policy.policy_default_rule = default {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.298257] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.298432] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_policy.policy_file = policy.yaml {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.298632] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.298803] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.298965] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.299135] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.299297] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.299464] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.299641] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.299817] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] profiler.connection_string = messaging:// {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.299982] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] profiler.enabled = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.300165] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] profiler.es_doc_type = notification 
{{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.300326] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] profiler.es_scroll_size = 10000 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.300511] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] profiler.es_scroll_time = 2m {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.300692] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] profiler.filter_error_trace = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.300865] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] profiler.hmac_keys = **** {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.301050] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] profiler.sentinel_service_name = mymaster {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.301218] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] profiler.socket_timeout = 0.1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.301382] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] profiler.trace_requests = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.301555] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] profiler.trace_sqlalchemy = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.301726] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] profiler_jaeger.process_tags = {} {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.301918] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] profiler_jaeger.service_name_prefix = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.302107] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] profiler_otlp.service_name_prefix = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.302614] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] remote_debug.host = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.302614] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] remote_debug.port = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.302682] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61629) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.302928] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.303117] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.303287] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.303450] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.303612] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.303777] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.303939] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.304112] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.304288] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.304448] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.304621] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.304791] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.304961] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.305149] 
env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.305317] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.305477] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.305683] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.305848] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.306014] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.306205] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.306367] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.306526] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.306694] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.306853] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.307014] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.307180] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.307339] env[61629]: DEBUG oslo_service.service [None 
req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.307505] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.307669] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.ssl = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.307839] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.308012] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.308182] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.308350] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.308553] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.ssl_version = {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.308747] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.308941] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.309167] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_notifications.retry = -1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.309368] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.309547] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_messaging_notifications.transport_url = **** {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.309724] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.auth_section = None {{(pid=61629) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.309887] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.auth_type = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.310070] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.cafile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.310235] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.certfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.310400] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.collect_timing = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.310586] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.connect_retries = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.310756] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.connect_retry_delay = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.310915] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.endpoint_id = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.311086] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.endpoint_override = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.311251] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.insecure = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.311411] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.keyfile = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.311584] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.max_version = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.311837] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.min_version = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.312036] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.region_name = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.312209] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.retriable_status_codes = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.312371] 
env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.service_name = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.312531] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.service_type = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.312694] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.split_loggers = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.312948] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.status_code_retries = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.313152] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.status_code_retry_delay = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.313318] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.timeout = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.313479] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.valid_interfaces = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.313641] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_limit.version = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.313811] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_reports.file_event_handler = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.313976] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.314152] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] oslo_reports.log_dir = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.314326] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.314487] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.314647] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.314812] 
env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.314976] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.315152] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.315323] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.315484] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vif_plug_ovs_privileged.group = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.315646] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.315811] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.315972] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.316143] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] vif_plug_ovs_privileged.user = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.316315] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] os_vif_linux_bridge.flat_interface = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.316493] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.316668] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.316837] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.317012] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.317186] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.317350] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.317514] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.317691] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.317862] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] os_vif_ovs.isolate_vif = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.318040] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.318212] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.318382] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.318591] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] os_vif_ovs.ovsdb_interface = native {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.318779] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] os_vif_ovs.per_port_bridge = False {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.318950] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] privsep_osbrick.capabilities = [21] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.319130] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] privsep_osbrick.group = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.319290] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] privsep_osbrick.helper_command = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.319456] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None 
None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.319617] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.319777] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] privsep_osbrick.user = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.319950] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.320123] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] nova_sys_admin.group = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.320285] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] nova_sys_admin.helper_command = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.320451] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.320639] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.320805] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] nova_sys_admin.user = None {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 483.320934] env[61629]: DEBUG oslo_service.service [None req-abc510b2-a624-46d6-9fc8-c9f55d011ff4 None None] ******************************************************************************** {{(pid=61629) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}} [ 483.321445] env[61629]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 483.824990] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Getting list of instances from cluster (obj){ [ 483.824990] env[61629]: value = "domain-c8" [ 483.824990] env[61629]: _type = "ClusterComputeResource" [ 483.824990] env[61629]: } {{(pid=61629) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 483.827052] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d200b8c-b0ef-42e8-bc75-80b8e4cd61b4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 483.835202] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Got total of 0 instances {{(pid=61629) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 483.835726] env[61629]: WARNING nova.virt.vmwareapi.driver [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] 
The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 483.836209] env[61629]: INFO nova.virt.node [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Generated node identity d075eff1-6f77-44a8-824e-16f3e03b4063 [ 483.836479] env[61629]: INFO nova.virt.node [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Wrote node identity d075eff1-6f77-44a8-824e-16f3e03b4063 to /opt/stack/data/n-cpu-1/compute_id [ 484.339544] env[61629]: WARNING nova.compute.manager [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Compute nodes ['d075eff1-6f77-44a8-824e-16f3e03b4063'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 485.345319] env[61629]: INFO nova.compute.manager [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 486.351053] env[61629]: WARNING nova.compute.manager [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 486.351398] env[61629]: DEBUG oslo_concurrency.lockutils [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 486.351553] env[61629]: DEBUG oslo_concurrency.lockutils [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 486.351713] env[61629]: DEBUG oslo_concurrency.lockutils [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 486.351859] env[61629]: DEBUG nova.compute.resource_tracker [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61629) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 486.352821] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-381200ed-afef-4cf4-9b9a-ee662a495236 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 486.361263] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc5c613f-4d0f-4301-8149-0abb665fc48a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 486.374490] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d231abdc-1815-4977-8ac5-de96303afb75 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 486.380838] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cfe8720-5417-4ac5-8598-c685eb2497a4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 486.410116] env[61629]: DEBUG nova.compute.resource_tracker [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181514MB free_disk=151GB free_vcpus=48 pci_devices=None {{(pid=61629) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 486.410428] env[61629]: DEBUG oslo_concurrency.lockutils [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 486.410705] env[61629]: DEBUG oslo_concurrency.lockutils [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 486.913721] env[61629]: WARNING nova.compute.resource_tracker [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] No compute node record for cpu-1:d075eff1-6f77-44a8-824e-16f3e03b4063: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host d075eff1-6f77-44a8-824e-16f3e03b4063 could not be found. [ 487.417966] env[61629]: INFO nova.compute.resource_tracker [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: d075eff1-6f77-44a8-824e-16f3e03b4063 [ 488.926866] env[61629]: DEBUG nova.compute.resource_tracker [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61629) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 488.927317] env[61629]: DEBUG nova.compute.resource_tracker [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61629) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 489.081183] env[61629]: INFO nova.scheduler.client.report [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] [req-f7b6be5c-e0f2-4ddb-9886-4a410f0623d1] Created resource provider record via placement API for resource provider with UUID d075eff1-6f77-44a8-824e-16f3e03b4063 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
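[editorial note] The repeated 'Acquiring lock "compute_resources" by ...', 'Lock ... acquired ... waited N s' and 'Lock ... "released" ... held N s' triplets in this log are emitted by oslo.concurrency's lockutils wrapper (the inner() frames at lockutils.py:402/407/421 above). A minimal illustrative sketch of that pattern, assuming a placeholder function name chosen to mirror ResourceTracker._update_available_resource; this is not Nova's actual code:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        # The body runs while the in-process lock named "compute_resources"
        # is held; with DEBUG logging enabled, lockutils' inner() wrapper
        # logs the "Acquiring lock ...", "acquired ... waited Ns" and
        # '"released" ... held Ns' lines seen throughout this log.
        pass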
[ 489.098677] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47bf2f00-1ae3-4d32-aca0-646089d4db30 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 489.106496] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a6532ac-b39c-4a18-a1b6-95514080ae6b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 489.137016] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01efb869-2a69-4d1b-82a1-4777f5587ef9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 489.144302] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfde8b8d-6da6-4f4e-bbcb-a1df4b03af19 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 489.157750] env[61629]: DEBUG nova.compute.provider_tree [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 489.693985] env[61629]: DEBUG nova.scheduler.client.report [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Updated inventory for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 489.694232] env[61629]: DEBUG nova.compute.provider_tree [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Updating resource provider d075eff1-6f77-44a8-824e-16f3e03b4063 generation from 0 to 1 during operation: update_inventory {{(pid=61629) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 489.694370] env[61629]: DEBUG nova.compute.provider_tree [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 489.742057] env[61629]: DEBUG nova.compute.provider_tree [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Updating 
resource provider d075eff1-6f77-44a8-824e-16f3e03b4063 generation from 1 to 2 during operation: update_traits {{(pid=61629) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 490.247722] env[61629]: DEBUG nova.compute.resource_tracker [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61629) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 490.247722] env[61629]: DEBUG oslo_concurrency.lockutils [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.836s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 490.247722] env[61629]: DEBUG nova.service [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Creating RPC server for service compute {{(pid=61629) start /opt/stack/nova/nova/service.py:186}} [ 490.265060] env[61629]: DEBUG nova.service [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] Join ServiceGroup membership for this service compute {{(pid=61629) start /opt/stack/nova/nova/service.py:203}} [ 490.265276] env[61629]: DEBUG nova.servicegroup.drivers.db [None req-20ab50d8-10a2-4df0-8989-9ee672f3ad9e None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61629) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 511.268892] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._sync_power_states {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 511.772168] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Getting list of instances from cluster (obj){ [ 511.772168] env[61629]: value = "domain-c8" [ 511.772168] env[61629]: _type = "ClusterComputeResource" [ 511.772168] env[61629]: } {{(pid=61629) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 511.773847] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477f669f-a646-4126-abde-8f14149ea0cc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.782339] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Got total of 0 instances {{(pid=61629) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 511.782566] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 511.782873] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Getting list of instances from cluster (obj){ [ 511.782873] env[61629]: value = "domain-c8" [ 511.782873] env[61629]: _type = "ClusterComputeResource" [ 511.782873] env[61629]: } {{(pid=61629) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 511.783886] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d5eb421-8e45-4af7-ae13-32d7496375b3 
{{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.792017] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Got total of 0 instances {{(pid=61629) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 522.732759] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Acquiring lock "113fe8e6-bc12-41fe-a405-cec2aa1a717e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 522.733075] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Lock "113fe8e6-bc12-41fe-a405-cec2aa1a717e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 523.235653] env[61629]: DEBUG nova.compute.manager [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 523.775434] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 523.775875] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 523.778054] env[61629]: INFO nova.compute.claims [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 524.847618] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13a332c6-2bd8-419d-8a94-560e895490bb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.857681] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ad3233-50ab-4864-b73a-decb997a1b4b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.895618] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb2e580-3745-4b8a-b4ef-473d499e9de7 {{(pid=61629) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.903297] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6708f85c-2998-4482-a2d6-43042600c2f9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.919365] env[61629]: DEBUG nova.compute.provider_tree [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 525.422583] env[61629]: DEBUG nova.scheduler.client.report [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 525.930166] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.154s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 525.932924] env[61629]: DEBUG nova.compute.manager [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 526.437671] env[61629]: DEBUG nova.compute.utils [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 526.440698] env[61629]: DEBUG nova.compute.manager [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Not allocating networking since 'none' was specified. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 526.941914] env[61629]: DEBUG nova.compute.manager [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 527.956294] env[61629]: DEBUG nova.compute.manager [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 528.145230] env[61629]: DEBUG nova.virt.hardware [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 528.145503] env[61629]: DEBUG nova.virt.hardware [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 528.145786] env[61629]: DEBUG nova.virt.hardware [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 528.145879] env[61629]: DEBUG nova.virt.hardware [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 528.145975] env[61629]: DEBUG nova.virt.hardware [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 528.146213] env[61629]: DEBUG nova.virt.hardware [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 528.146634] env[61629]: DEBUG nova.virt.hardware [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 528.146634] env[61629]: DEBUG nova.virt.hardware [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 528.147090] env[61629]: DEBUG nova.virt.hardware [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 528.147382] env[61629]: DEBUG nova.virt.hardware [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 528.148163] env[61629]: DEBUG nova.virt.hardware [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 528.148447] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55743324-c6f8-44bd-9bd8-0d2742e816e8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.158158] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2434e605-7bf0-4607-84c0-8bc85d5c12bf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.176190] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0115cc7b-52c7-4b76-b6a7-cae1fad1e4e5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.196467] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Instance VIF info [] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 528.209774] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 528.210085] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2da153ce-38eb-4255-a762-5744eac8c29d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.223509] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Created folder: OpenStack in parent group-v4. 
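[editorial note] The 'Invoking Folder.CreateFolder', 'Invoking Folder.CreateVM_Task' and 'Waiting for the task ... to complete' entries around here follow the usual oslo.vmware calling pattern: invoke a vSphere API method through the session, and poll any returned Task object. A rough sketch under assumed placeholder credentials and managed-object references (illustrative only, not the vmwareapi driver's implementation; real values come from nova.conf's [vmware] section):

    from oslo_vmware import api

    # Placeholder vCenter endpoint and credentials; constructing the session
    # performs the SessionManager.Login seen earlier in this log.
    session = api.VMwareAPISession('vc.example.org', 'admin', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Non-task calls (CreateFolder, RetrievePropertiesEx, ...) return their
    # result directly; here the parent is simply the inventory root folder.
    root = session.vim.service_content.rootFolder
    folder_ref = session.invoke_api(session.vim, 'CreateFolder', root,
                                    name='OpenStack')

    # Task-returning calls (CreateVM_Task, SearchDatastore_Task, ...) are
    # polled with wait_for_task(), which produces the "progress is 0%" /
    # "completed successfully" lines below; the config spec and resource
    # pool reference are omitted here.
    # task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
    #                           config=config_spec, pool=res_pool_ref)
    # session.wait_for_task(task)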
[ 528.223634] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Creating folder: Project (ed191f6523054175b5ab96e9ff1c546c). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 528.223878] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5643e18-ea7f-47fd-bd07-f1cf7d6b5a65 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.233375] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Created folder: Project (ed191f6523054175b5ab96e9ff1c546c) in parent group-v288443. [ 528.233528] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Creating folder: Instances. Parent ref: group-v288444. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 528.233678] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bce50352-d5e8-4ec4-933f-00d69733adde {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.246675] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Created folder: Instances in parent group-v288444. [ 528.246942] env[61629]: DEBUG oslo.service.loopingcall [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 528.247707] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 528.247707] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-98c976d4-8ab3-4db4-a2e9-a0ec86d0b431 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.271588] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 528.271588] env[61629]: value = "task-1353940" [ 528.271588] env[61629]: _type = "Task" [ 528.271588] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 528.280708] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1353940, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 528.524973] env[61629]: DEBUG oslo_concurrency.lockutils [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Acquiring lock "20e445dd-663c-46e4-bc0a-f00e68ecd6cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 528.525282] env[61629]: DEBUG oslo_concurrency.lockutils [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Lock "20e445dd-663c-46e4-bc0a-f00e68ecd6cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 528.782574] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1353940, 'name': CreateVM_Task, 'duration_secs': 0.269764} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 528.782917] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 528.784384] env[61629]: DEBUG oslo_vmware.service [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b5e874-9f76-4a6d-833d-2b1337629cf8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.791561] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 528.791561] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 528.793037] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 528.793037] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3b8e78a-fcba-41c5-b615-02bcdd3854b3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.799667] env[61629]: DEBUG oslo_vmware.api [None 
req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Waiting for the task: (returnval){ [ 528.799667] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]526086ef-2f25-464a-5c7b-01fea294852f" [ 528.799667] env[61629]: _type = "Task" [ 528.799667] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 528.808675] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]526086ef-2f25-464a-5c7b-01fea294852f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 529.028734] env[61629]: DEBUG nova.compute.manager [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 529.313159] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 529.313438] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 529.313756] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 529.313808] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 529.314226] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 529.317417] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bbef73ed-e694-4055-9e91-7fe32b1a750f {{(pid=61629) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.339216] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 529.340717] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 529.342506] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd06facd-ad3e-4bcf-8517-d454b14c0fa5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.351817] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef666470-40ee-422d-96cd-0be6954176aa {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.356800] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Waiting for the task: (returnval){ [ 529.356800] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52e7e408-944b-31e7-6ffa-f0651bb37a45" [ 529.356800] env[61629]: _type = "Task" [ 529.356800] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 529.369326] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52e7e408-944b-31e7-6ffa-f0651bb37a45, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 529.561818] env[61629]: DEBUG oslo_concurrency.lockutils [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.561818] env[61629]: DEBUG oslo_concurrency.lockutils [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.565475] env[61629]: INFO nova.compute.claims [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 529.877289] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Acquiring lock "8a8a3aa7-ec40-4a8a-a823-718025428a59" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.877503] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Lock "8a8a3aa7-ec40-4a8a-a823-718025428a59" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.879297] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquiring lock "abb87186-9951-4fbe-98b2-b595dd4fea12" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.879502] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "abb87186-9951-4fbe-98b2-b595dd4fea12" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.894722] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Preparing fetch location {{(pid=61629) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 529.894722] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Creating directory with path [datastore2] vmware_temp/32b9243c-2d1a-420e-bb49-7e6427cd0f83/7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 529.895179] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b90bc7d1-beb1-4d25-832c-ef8383cf2674 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.919401] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Created directory with path [datastore2] vmware_temp/32b9243c-2d1a-420e-bb49-7e6427cd0f83/7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 529.919992] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Fetch image to [datastore2] vmware_temp/32b9243c-2d1a-420e-bb49-7e6427cd0f83/7f036972-f3d8-47df-ae86-f8f2844bf80c/tmp-sparse.vmdk {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 529.920246] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Downloading image file data 7f036972-f3d8-47df-ae86-f8f2844bf80c to [datastore2] vmware_temp/32b9243c-2d1a-420e-bb49-7e6427cd0f83/7f036972-f3d8-47df-ae86-f8f2844bf80c/tmp-sparse.vmdk on the data store datastore2 {{(pid=61629) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 529.922554] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2140b61-f49c-478f-89e1-f4450ed6d6b4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.938015] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd30f8f-3620-44bd-9b2b-0555620b2d6c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.940834] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquiring lock "cf628773-7dcb-430a-b3ae-a5b62808e279" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.941067] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "cf628773-7dcb-430a-b3ae-a5b62808e279" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.951894] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b47704-2186-4e6f-b829-f5729feac01c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.992944] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-676abd0e-8f25-464e-80a0-b5469f930009 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.999358] env[61629]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-edc229b6-4efb-4b4a-9bf9-22e6760ddcde {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.031021] env[61629]: DEBUG nova.virt.vmwareapi.images [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Downloading image file data 7f036972-f3d8-47df-ae86-f8f2844bf80c to the data store datastore2 {{(pid=61629) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 530.120651] env[61629]: DEBUG oslo_vmware.rw_handles [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/32b9243c-2d1a-420e-bb49-7e6427cd0f83/7f036972-f3d8-47df-ae86-f8f2844bf80c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61629) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 530.364363] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Acquiring lock "bfbff392-0dc0-47c7-ae58-22d922638ac8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 530.364606] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Lock "bfbff392-0dc0-47c7-ae58-22d922638ac8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 530.385706] env[61629]: DEBUG nova.compute.manager [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 530.401752] env[61629]: DEBUG nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Starting instance... 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 530.448856] env[61629]: DEBUG nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 530.795930] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf0ebde-c9da-43bb-b24c-97b608dda833 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.810012] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d1ec91f-2436-4479-9d53-f9cc2b93183f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.864886] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0be6e8ed-d499-42a3-8002-a55733c1c75a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.869282] env[61629]: DEBUG oslo_vmware.rw_handles [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Completed reading data from the image iterator. {{(pid=61629) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 530.869583] env[61629]: DEBUG oslo_vmware.rw_handles [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/32b9243c-2d1a-420e-bb49-7e6427cd0f83/7f036972-f3d8-47df-ae86-f8f2844bf80c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61629) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 530.870130] env[61629]: DEBUG nova.compute.manager [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Starting instance... 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 530.879605] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1847784c-1306-4908-b5ff-e7d938454786 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.898502] env[61629]: DEBUG nova.compute.provider_tree [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 530.934234] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 530.937518] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 530.990138] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 530.995486] env[61629]: DEBUG nova.virt.vmwareapi.images [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Downloaded image file data 7f036972-f3d8-47df-ae86-f8f2844bf80c to vmware_temp/32b9243c-2d1a-420e-bb49-7e6427cd0f83/7f036972-f3d8-47df-ae86-f8f2844bf80c/tmp-sparse.vmdk on the data store datastore2 {{(pid=61629) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 530.997261] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Caching image {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 530.997400] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Copying Virtual Disk [datastore2] vmware_temp/32b9243c-2d1a-420e-bb49-7e6427cd0f83/7f036972-f3d8-47df-ae86-f8f2844bf80c/tmp-sparse.vmdk to [datastore2] vmware_temp/32b9243c-2d1a-420e-bb49-7e6427cd0f83/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 530.998819] env[61629]: DEBUG oslo_vmware.service [-] Invoking 
VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f452032b-cb66-4c41-8ba5-0f87e400aa6b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.008409] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Waiting for the task: (returnval){ [ 531.008409] env[61629]: value = "task-1353941" [ 531.008409] env[61629]: _type = "Task" [ 531.008409] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 531.022064] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353941, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 531.084845] env[61629]: DEBUG oslo_concurrency.lockutils [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "d29660cc-47f8-4ca5-b21f-bcfd945abc03" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.085235] env[61629]: DEBUG oslo_concurrency.lockutils [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "d29660cc-47f8-4ca5-b21f-bcfd945abc03" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.406478] env[61629]: DEBUG nova.scheduler.client.report [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 531.419101] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.523311] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353941, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 531.587653] env[61629]: DEBUG nova.compute.manager [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 531.913859] env[61629]: DEBUG oslo_concurrency.lockutils [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.352s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 531.914308] env[61629]: DEBUG nova.compute.manager [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 531.917809] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.984s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.919620] env[61629]: INFO nova.compute.claims [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 532.019953] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353941, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.670496} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 532.020410] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Copied Virtual Disk [datastore2] vmware_temp/32b9243c-2d1a-420e-bb49-7e6427cd0f83/7f036972-f3d8-47df-ae86-f8f2844bf80c/tmp-sparse.vmdk to [datastore2] vmware_temp/32b9243c-2d1a-420e-bb49-7e6427cd0f83/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 532.020602] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Deleting the datastore file [datastore2] vmware_temp/32b9243c-2d1a-420e-bb49-7e6427cd0f83/7f036972-f3d8-47df-ae86-f8f2844bf80c/tmp-sparse.vmdk {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 532.022021] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bb96eef8-ea44-4641-8bcc-0ca9c98999a1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.029928] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Waiting for the task: (returnval){ [ 532.029928] env[61629]: value = "task-1353942" [ 532.029928] env[61629]: _type = "Task" [ 532.029928] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 532.041156] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353942, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 532.116669] env[61629]: DEBUG oslo_concurrency.lockutils [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.428483] env[61629]: DEBUG nova.compute.utils [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 532.430176] env[61629]: DEBUG nova.compute.manager [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 532.430176] env[61629]: DEBUG nova.network.neutron [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 532.541924] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353942, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.0219} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 532.545640] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 532.545950] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Moving file from [datastore2] vmware_temp/32b9243c-2d1a-420e-bb49-7e6427cd0f83/7f036972-f3d8-47df-ae86-f8f2844bf80c to [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c. {{(pid=61629) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 532.546317] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-b19b1fff-732e-4c78-a717-ea35eb98b8e4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.553896] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Waiting for the task: (returnval){ [ 532.553896] env[61629]: value = "task-1353943" [ 532.553896] env[61629]: _type = "Task" [ 532.553896] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 532.565017] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353943, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 532.647021] env[61629]: DEBUG nova.policy [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1183b618b25b4372be4fa1fc63e9f124', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e5a917d9027d4b97b1ddd211acbeaada', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 532.941582] env[61629]: DEBUG nova.compute.manager [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 533.069397] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353943, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.028245} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 533.069397] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] File moved {{(pid=61629) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 533.069397] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Cleaning up location [datastore2] vmware_temp/32b9243c-2d1a-420e-bb49-7e6427cd0f83 {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 533.069397] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Deleting the datastore file [datastore2] vmware_temp/32b9243c-2d1a-420e-bb49-7e6427cd0f83 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 533.069397] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-967f5783-338f-42b6-aef4-998beea57fe7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.072741] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925be080-11f4-4deb-98b8-f88649f9c093 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.076844] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Waiting for the task: (returnval){ [ 533.076844] 
env[61629]: value = "task-1353944" [ 533.076844] env[61629]: _type = "Task" [ 533.076844] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 533.087183] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353944, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 533.090225] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a68ce98d-8b6f-4c93-bd4d-dc6f6a7b11e8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.123998] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0367f460-a838-4940-b034-df6edc9b83a7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.132186] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47da0568-1349-41a1-8a9a-703f0c6c273c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.146607] env[61629]: DEBUG nova.compute.provider_tree [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 533.449478] env[61629]: DEBUG oslo_concurrency.lockutils [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Acquiring lock "f128e0a7-f67b-4800-bfd6-ec65c5042460" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.449987] env[61629]: DEBUG oslo_concurrency.lockutils [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Lock "f128e0a7-f67b-4800-bfd6-ec65c5042460" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.479252] env[61629]: DEBUG nova.network.neutron [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Successfully created port: 4cedabf3-2049-4c0a-a3b3-f3fc62767ed6 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 533.588649] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353944, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024137} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 533.589391] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 533.590155] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d1a3022-b43e-4fe6-8fe5-cae7e02c6389 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.600052] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Waiting for the task: (returnval){ [ 533.600052] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52b06325-2b33-4166-7247-c59b2f7f742d" [ 533.600052] env[61629]: _type = "Task" [ 533.600052] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 533.610248] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52b06325-2b33-4166-7247-c59b2f7f742d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 533.653647] env[61629]: DEBUG nova.scheduler.client.report [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 533.959212] env[61629]: DEBUG nova.compute.manager [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 533.967494] env[61629]: DEBUG nova.compute.manager [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 534.002393] env[61629]: DEBUG nova.virt.hardware [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 534.002393] env[61629]: DEBUG nova.virt.hardware [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 534.002393] env[61629]: DEBUG nova.virt.hardware [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 534.002582] env[61629]: DEBUG nova.virt.hardware [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 534.002582] env[61629]: DEBUG nova.virt.hardware [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 534.002582] env[61629]: DEBUG nova.virt.hardware [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 534.002582] env[61629]: DEBUG nova.virt.hardware [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 534.002582] env[61629]: DEBUG nova.virt.hardware [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 534.002717] env[61629]: DEBUG nova.virt.hardware [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 534.005411] env[61629]: DEBUG nova.virt.hardware [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 534.005624] env[61629]: DEBUG nova.virt.hardware [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 534.006664] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0f6cd44-6560-4238-8f75-6808e82ac673 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.017372] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fc1b9ad-e91c-4256-a8ba-9aae6221200b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.115084] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52b06325-2b33-4166-7247-c59b2f7f742d, 'name': SearchDatastore_Task, 'duration_secs': 0.009129} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 534.115493] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 534.115742] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 113fe8e6-bc12-41fe-a405-cec2aa1a717e/113fe8e6-bc12-41fe-a405-cec2aa1a717e.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 534.116243] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-52db9da8-f603-4c74-9a98-65f9cbac7490 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.124960] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Waiting for the task: (returnval){ [ 534.124960] env[61629]: value = "task-1353945" [ 534.124960] env[61629]: _type = "Task" [ 534.124960] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 534.135639] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353945, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 534.159128] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.241s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 534.159811] env[61629]: DEBUG nova.compute.manager [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 534.164377] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.226s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.165819] env[61629]: INFO nova.compute.claims [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 534.494741] env[61629]: DEBUG oslo_concurrency.lockutils [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.638910] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353945, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 534.672248] env[61629]: DEBUG nova.compute.utils [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 534.678647] env[61629]: DEBUG nova.compute.manager [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 534.678647] env[61629]: DEBUG nova.network.neutron [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 534.752752] env[61629]: DEBUG nova.policy [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b6baa5fb35bd4f6099fefa267079af86', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '80a41fcb89a44cc396520fb49311b56d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 535.142823] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353945, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.557328} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 535.142823] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 113fe8e6-bc12-41fe-a405-cec2aa1a717e/113fe8e6-bc12-41fe-a405-cec2aa1a717e.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 535.143169] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 535.143395] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7e7ac389-068d-42cb-8075-b28aa0d6c7cd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.152332] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Waiting for the task: (returnval){ [ 535.152332] env[61629]: value = "task-1353946" [ 535.152332] env[61629]: _type = "Task" [ 535.152332] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 535.160415] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353946, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 535.178919] env[61629]: DEBUG nova.compute.manager [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 535.304468] env[61629]: DEBUG nova.network.neutron [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Successfully created port: 5bb1633d-e41c-4ced-ab9f-e6019618b6f1 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 535.369161] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff0ab2f0-b52d-4c70-b15f-d007535cd07e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.381819] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae88152e-27d8-404d-ad5f-1887f6cfac91 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.426429] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88584ab-2b9f-42e9-9ab7-fd22b1224afd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.438397] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd82911-d922-4c16-bc41-f4d95e38f313 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.452653] env[61629]: DEBUG nova.compute.provider_tree [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 535.545258] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "1a756eed-d5f2-4135-b522-ed06e20da1bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.546910] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "1a756eed-d5f2-4135-b522-ed06e20da1bc" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.664471] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353946, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10216} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 535.666104] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 535.666104] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f5d161-5631-485e-bd88-95de5c136616 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.689902] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] 113fe8e6-bc12-41fe-a405-cec2aa1a717e/113fe8e6-bc12-41fe-a405-cec2aa1a717e.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 535.693354] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1bd46cc-dbdd-4dc8-878e-a204ca362d96 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.714021] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Waiting for the task: (returnval){ [ 535.714021] env[61629]: value = "task-1353947" [ 535.714021] env[61629]: _type = "Task" [ 535.714021] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 535.722228] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353947, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 535.956471] env[61629]: DEBUG nova.scheduler.client.report [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 536.050532] env[61629]: DEBUG nova.compute.manager [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 536.209311] env[61629]: DEBUG nova.compute.manager [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 536.226382] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353947, 'name': ReconfigVM_Task, 'duration_secs': 0.279302} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 536.226646] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Reconfigured VM instance instance-00000001 to attach disk [datastore2] 113fe8e6-bc12-41fe-a405-cec2aa1a717e/113fe8e6-bc12-41fe-a405-cec2aa1a717e.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 536.227321] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8baf0b08-f54b-4a62-9061-1ff5f28addf1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.234330] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Waiting for the task: (returnval){ [ 536.234330] env[61629]: value = "task-1353948" [ 536.234330] env[61629]: _type = "Task" [ 536.234330] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 536.240353] env[61629]: DEBUG nova.virt.hardware [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 536.240581] env[61629]: DEBUG nova.virt.hardware [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 536.240732] env[61629]: DEBUG nova.virt.hardware [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 536.240906] env[61629]: DEBUG nova.virt.hardware [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 536.241146] env[61629]: DEBUG nova.virt.hardware [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 536.241237] env[61629]: DEBUG nova.virt.hardware [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 536.241527] env[61629]: DEBUG nova.virt.hardware [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 536.241593] env[61629]: DEBUG nova.virt.hardware [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Build topologies 
for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 536.241732] env[61629]: DEBUG nova.virt.hardware [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 536.241885] env[61629]: DEBUG nova.virt.hardware [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 536.242110] env[61629]: DEBUG nova.virt.hardware [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 536.243601] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c07fc50-8359-4f1a-864f-81228f7f5335 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.251190] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353948, 'name': Rename_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 536.256532] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c8fca9-92f3-4b77-809d-b19fc5320c26 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.463128] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.300s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 536.463653] env[61629]: DEBUG nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 536.468017] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.477s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 536.469714] env[61629]: INFO nova.compute.claims [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 536.579635] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.602680] env[61629]: ERROR nova.compute.manager [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5bb1633d-e41c-4ced-ab9f-e6019618b6f1, please check neutron logs for more information. [ 536.602680] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 536.602680] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 536.602680] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 536.602680] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 536.602680] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 536.602680] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 536.602680] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 536.602680] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.602680] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 536.602680] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.602680] env[61629]: ERROR nova.compute.manager raise self.value [ 536.602680] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 536.602680] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 536.602680] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.602680] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 536.603183] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.603183] env[61629]: ERROR 
nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 536.603183] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5bb1633d-e41c-4ced-ab9f-e6019618b6f1, please check neutron logs for more information. [ 536.603183] env[61629]: ERROR nova.compute.manager [ 536.603183] env[61629]: Traceback (most recent call last): [ 536.603183] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 536.603183] env[61629]: listener.cb(fileno) [ 536.603183] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 536.603183] env[61629]: result = function(*args, **kwargs) [ 536.603183] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 536.603183] env[61629]: return func(*args, **kwargs) [ 536.603183] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 536.603183] env[61629]: raise e [ 536.603183] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 536.603183] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 536.603183] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 536.603183] env[61629]: created_port_ids = self._update_ports_for_instance( [ 536.603183] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 536.603183] env[61629]: with excutils.save_and_reraise_exception(): [ 536.603183] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.603183] env[61629]: self.force_reraise() [ 536.603183] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.603183] env[61629]: raise self.value [ 536.603183] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 536.603183] env[61629]: updated_port = self._update_port( [ 536.603183] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.603183] env[61629]: _ensure_no_port_binding_failure(port) [ 536.603183] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.603183] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 536.603866] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 5bb1633d-e41c-4ced-ab9f-e6019618b6f1, please check neutron logs for more information. [ 536.603866] env[61629]: Removing descriptor: 17 [ 536.605713] env[61629]: ERROR nova.compute.manager [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5bb1633d-e41c-4ced-ab9f-e6019618b6f1, please check neutron logs for more information. 
[ 536.605713] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Traceback (most recent call last): [ 536.605713] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 536.605713] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] yield resources [ 536.605713] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 536.605713] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] self.driver.spawn(context, instance, image_meta, [ 536.605713] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 536.605713] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] self._vmops.spawn(context, instance, image_meta, injected_files, [ 536.605713] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 536.605713] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] vm_ref = self.build_virtual_machine(instance, [ 536.605713] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 536.606079] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] vif_infos = vmwarevif.get_vif_info(self._session, [ 536.606079] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 536.606079] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] for vif in network_info: [ 536.606079] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 536.606079] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] return self._sync_wrapper(fn, *args, **kwargs) [ 536.606079] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 536.606079] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] self.wait() [ 536.606079] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 536.606079] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] self[:] = self._gt.wait() [ 536.606079] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 536.606079] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] return self._exit_event.wait() [ 536.606079] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 536.606079] env[61629]: ERROR 
nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] result = hub.switch() [ 536.606412] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 536.606412] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] return self.greenlet.switch() [ 536.606412] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 536.606412] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] result = function(*args, **kwargs) [ 536.606412] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 536.606412] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] return func(*args, **kwargs) [ 536.606412] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 536.606412] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] raise e [ 536.606412] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 536.606412] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] nwinfo = self.network_api.allocate_for_instance( [ 536.606412] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 536.606412] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] created_port_ids = self._update_ports_for_instance( [ 536.606412] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 536.606752] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] with excutils.save_and_reraise_exception(): [ 536.606752] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.606752] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] self.force_reraise() [ 536.606752] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.606752] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] raise self.value [ 536.606752] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 536.606752] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] updated_port = self._update_port( [ 536.606752] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.606752] 
env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] _ensure_no_port_binding_failure(port) [ 536.606752] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.606752] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] raise exception.PortBindingFailed(port_id=port['id']) [ 536.606752] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] nova.exception.PortBindingFailed: Binding failed for port 5bb1633d-e41c-4ced-ab9f-e6019618b6f1, please check neutron logs for more information. [ 536.606752] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] [ 536.607090] env[61629]: INFO nova.compute.manager [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Terminating instance [ 536.611850] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Acquiring lock "refresh_cache-8a8a3aa7-ec40-4a8a-a823-718025428a59" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 536.611850] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Acquired lock "refresh_cache-8a8a3aa7-ec40-4a8a-a823-718025428a59" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 536.611850] env[61629]: DEBUG nova.network.neutron [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 536.636088] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Acquiring lock "33029a57-19d2-45eb-b4ec-f50c47d3dc12" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.636604] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Lock "33029a57-19d2-45eb-b4ec-f50c47d3dc12" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 536.746745] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353948, 'name': Rename_Task, 'duration_secs': 0.132978} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 536.746745] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 536.746968] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d88d225c-490a-4a36-af3b-bcb009beda48 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.754009] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Waiting for the task: (returnval){ [ 536.754009] env[61629]: value = "task-1353949" [ 536.754009] env[61629]: _type = "Task" [ 536.754009] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 536.762658] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353949, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 536.974919] env[61629]: DEBUG nova.compute.utils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 536.984490] env[61629]: DEBUG nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 536.984801] env[61629]: DEBUG nova.network.neutron [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 537.046402] env[61629]: ERROR nova.compute.manager [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4cedabf3-2049-4c0a-a3b3-f3fc62767ed6, please check neutron logs for more information. 
[ 537.046402] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 537.046402] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 537.046402] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 537.046402] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 537.046402] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 537.046402] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 537.046402] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 537.046402] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 537.046402] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 537.046402] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 537.046402] env[61629]: ERROR nova.compute.manager raise self.value [ 537.046402] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 537.046402] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 537.046402] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 537.046402] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 537.046864] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 537.046864] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 537.046864] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4cedabf3-2049-4c0a-a3b3-f3fc62767ed6, please check neutron logs for more information. 
[ 537.046864] env[61629]: ERROR nova.compute.manager [ 537.046864] env[61629]: Traceback (most recent call last): [ 537.046864] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 537.046864] env[61629]: listener.cb(fileno) [ 537.046864] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 537.046864] env[61629]: result = function(*args, **kwargs) [ 537.046864] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 537.046864] env[61629]: return func(*args, **kwargs) [ 537.046864] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 537.046864] env[61629]: raise e [ 537.046864] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 537.046864] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 537.046864] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 537.046864] env[61629]: created_port_ids = self._update_ports_for_instance( [ 537.046864] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 537.046864] env[61629]: with excutils.save_and_reraise_exception(): [ 537.046864] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 537.046864] env[61629]: self.force_reraise() [ 537.046864] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 537.046864] env[61629]: raise self.value [ 537.046864] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 537.046864] env[61629]: updated_port = self._update_port( [ 537.046864] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 537.046864] env[61629]: _ensure_no_port_binding_failure(port) [ 537.046864] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 537.046864] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 537.047686] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 4cedabf3-2049-4c0a-a3b3-f3fc62767ed6, please check neutron logs for more information. [ 537.047686] env[61629]: Removing descriptor: 15 [ 537.047686] env[61629]: ERROR nova.compute.manager [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4cedabf3-2049-4c0a-a3b3-f3fc62767ed6, please check neutron logs for more information. 
[ 537.047686] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Traceback (most recent call last): [ 537.047686] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 537.047686] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] yield resources [ 537.047686] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 537.047686] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] self.driver.spawn(context, instance, image_meta, [ 537.047686] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 537.047686] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 537.047686] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 537.047686] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] vm_ref = self.build_virtual_machine(instance, [ 537.048052] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 537.048052] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] vif_infos = vmwarevif.get_vif_info(self._session, [ 537.048052] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 537.048052] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] for vif in network_info: [ 537.048052] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 537.048052] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] return self._sync_wrapper(fn, *args, **kwargs) [ 537.048052] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 537.048052] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] self.wait() [ 537.048052] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 537.048052] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] self[:] = self._gt.wait() [ 537.048052] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 537.048052] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] return self._exit_event.wait() [ 537.048052] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 537.048497] env[61629]: ERROR 
nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] result = hub.switch() [ 537.048497] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 537.048497] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] return self.greenlet.switch() [ 537.048497] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 537.048497] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] result = function(*args, **kwargs) [ 537.048497] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 537.048497] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] return func(*args, **kwargs) [ 537.048497] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 537.048497] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] raise e [ 537.048497] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 537.048497] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] nwinfo = self.network_api.allocate_for_instance( [ 537.048497] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 537.048497] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] created_port_ids = self._update_ports_for_instance( [ 537.048886] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 537.048886] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] with excutils.save_and_reraise_exception(): [ 537.048886] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 537.048886] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] self.force_reraise() [ 537.048886] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 537.048886] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] raise self.value [ 537.048886] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 537.048886] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] updated_port = self._update_port( [ 537.048886] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 537.048886] 
env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] _ensure_no_port_binding_failure(port) [ 537.048886] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 537.048886] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] raise exception.PortBindingFailed(port_id=port['id']) [ 537.049506] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] nova.exception.PortBindingFailed: Binding failed for port 4cedabf3-2049-4c0a-a3b3-f3fc62767ed6, please check neutron logs for more information. [ 537.049506] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] [ 537.049506] env[61629]: INFO nova.compute.manager [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Terminating instance [ 537.049752] env[61629]: DEBUG oslo_concurrency.lockutils [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Acquiring lock "refresh_cache-20e445dd-663c-46e4-bc0a-f00e68ecd6cd" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 537.049978] env[61629]: DEBUG oslo_concurrency.lockutils [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Acquired lock "refresh_cache-20e445dd-663c-46e4-bc0a-f00e68ecd6cd" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 537.050158] env[61629]: DEBUG nova.network.neutron [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 537.078811] env[61629]: DEBUG nova.policy [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5fe4bfac55cf40e79f7d54206afde0d6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d3e9a414c944234a52993d63b42e53f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 537.139032] env[61629]: DEBUG nova.compute.manager [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Starting instance... 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 537.157016] env[61629]: DEBUG nova.network.neutron [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 537.266853] env[61629]: DEBUG oslo_vmware.api [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353949, 'name': PowerOnVM_Task, 'duration_secs': 0.441041} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 537.267129] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 537.267583] env[61629]: INFO nova.compute.manager [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Took 9.31 seconds to spawn the instance on the hypervisor. [ 537.267786] env[61629]: DEBUG nova.compute.manager [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 537.268573] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be88ee4-9cd4-44ff-9006-518880f0ad25 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.373555] env[61629]: DEBUG nova.network.neutron [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 537.486381] env[61629]: DEBUG nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 537.541196] env[61629]: DEBUG nova.compute.manager [req-a52bb08e-de79-4828-86ac-ba28bcb3a30d req-32cbb7aa-aea2-4a24-a0f8-a2337a6ce7f2 service nova] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Received event network-changed-5bb1633d-e41c-4ced-ab9f-e6019618b6f1 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 537.541903] env[61629]: DEBUG nova.compute.manager [req-a52bb08e-de79-4828-86ac-ba28bcb3a30d req-32cbb7aa-aea2-4a24-a0f8-a2337a6ce7f2 service nova] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Refreshing instance network info cache due to event network-changed-5bb1633d-e41c-4ced-ab9f-e6019618b6f1. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 537.541903] env[61629]: DEBUG oslo_concurrency.lockutils [req-a52bb08e-de79-4828-86ac-ba28bcb3a30d req-32cbb7aa-aea2-4a24-a0f8-a2337a6ce7f2 service nova] Acquiring lock "refresh_cache-8a8a3aa7-ec40-4a8a-a823-718025428a59" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 537.585012] env[61629]: DEBUG nova.network.neutron [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 537.601730] env[61629]: DEBUG nova.compute.manager [req-f482c5d8-3715-4764-8a76-21b06751782e req-a64e5d9e-29de-494b-995b-a254cd3e0ec5 service nova] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Received event network-changed-4cedabf3-2049-4c0a-a3b3-f3fc62767ed6 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 537.603272] env[61629]: DEBUG nova.compute.manager [req-f482c5d8-3715-4764-8a76-21b06751782e req-a64e5d9e-29de-494b-995b-a254cd3e0ec5 service nova] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Refreshing instance network info cache due to event network-changed-4cedabf3-2049-4c0a-a3b3-f3fc62767ed6. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 537.603272] env[61629]: DEBUG oslo_concurrency.lockutils [req-f482c5d8-3715-4764-8a76-21b06751782e req-a64e5d9e-29de-494b-995b-a254cd3e0ec5 service nova] Acquiring lock "refresh_cache-20e445dd-663c-46e4-bc0a-f00e68ecd6cd" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 537.673262] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.767152] env[61629]: DEBUG nova.network.neutron [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 537.798182] env[61629]: INFO nova.compute.manager [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Took 14.06 seconds to build instance. [ 537.876238] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Releasing lock "refresh_cache-8a8a3aa7-ec40-4a8a-a823-718025428a59" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 537.876688] env[61629]: DEBUG nova.compute.manager [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 537.876870] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 537.877199] env[61629]: DEBUG oslo_concurrency.lockutils [req-a52bb08e-de79-4828-86ac-ba28bcb3a30d req-32cbb7aa-aea2-4a24-a0f8-a2337a6ce7f2 service nova] Acquired lock "refresh_cache-8a8a3aa7-ec40-4a8a-a823-718025428a59" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 537.877430] env[61629]: DEBUG nova.network.neutron [req-a52bb08e-de79-4828-86ac-ba28bcb3a30d req-32cbb7aa-aea2-4a24-a0f8-a2337a6ce7f2 service nova] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Refreshing network info cache for port 5bb1633d-e41c-4ced-ab9f-e6019618b6f1 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 537.878725] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7d5210b2-d457-4ff6-8807-cbef57049081 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.884776] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70139d0d-2e35-4134-834a-d0997dfceef8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.909108] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46a9623c-befa-441e-a7e6-bf9eba2ad956 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.923848] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-509126ef-7217-451e-bb8e-e7c36b573c64 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.967877] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8a8a3aa7-ec40-4a8a-a823-718025428a59 could not be found. [ 537.968103] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 537.968282] env[61629]: INFO nova.compute.manager [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Took 0.09 seconds to destroy the instance on the hypervisor. 
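[editor's note] The PortBindingFailed tracebacks above end in nova/network/neutron.py's _ensure_no_port_binding_failure() raising once the port returned by Neutron reports a failed binding. The following is a minimal standalone sketch of that pattern, not Nova's actual code: the helper name, constant value, and exception class here are illustrative stand-ins, and the only grounded detail is that Neutron signals a failed binding through the port's 'binding:vif_type' field.

# Minimal sketch (not Nova's implementation) of the check behind the
# PortBindingFailed tracebacks above: after a port is created/updated,
# the Neutron API returns a port dict whose 'binding:vif_type' field is
# 'binding_failed' when no mechanism driver could bind it.
VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed constant value


class PortBindingFailed(Exception):
    """Mirrors the shape of nova.exception.PortBindingFailed."""

    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")
        self.port_id = port_id


def ensure_no_port_binding_failure(port: dict) -> None:
    """Raise if Neutron reports the port's VIF binding as failed."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# Example: a port dict shaped like the one behind the traceback above.
port = {'id': '5bb1633d-e41c-4ced-ab9f-e6019618b6f1',
        'binding:vif_type': VIF_TYPE_BINDING_FAILED}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)

When the check fires inside _allocate_network_async, the greenthread re-raises, spawn fails, and the cleanup path seen above (Terminating instance, refresh_cache lock, destroy, deallocate) takes over.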
[ 537.968515] env[61629]: DEBUG oslo.service.loopingcall [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 537.969274] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b20b797-8de0-4f91-ab7e-96a57ea0e110 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.972040] env[61629]: DEBUG nova.compute.manager [-] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 537.972146] env[61629]: DEBUG nova.network.neutron [-] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 537.982406] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-424351aa-e415-40d2-a43a-5dc7ab8c6d6d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.003911] env[61629]: DEBUG nova.compute.provider_tree [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 538.015159] env[61629]: DEBUG nova.network.neutron [-] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 538.215261] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Acquiring lock "733343f7-99e2-4e07-94eb-1b66458d799a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.218769] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Lock "733343f7-99e2-4e07-94eb-1b66458d799a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.270693] env[61629]: DEBUG nova.network.neutron [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Successfully created port: 9f30835a-0bc5-4945-8f94-dcc9db484a75 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 538.272947] env[61629]: DEBUG oslo_concurrency.lockutils [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Releasing lock "refresh_cache-20e445dd-663c-46e4-bc0a-f00e68ecd6cd" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 538.273276] env[61629]: DEBUG nova.compute.manager [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 538.273465] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 538.273763] env[61629]: DEBUG oslo_concurrency.lockutils [req-f482c5d8-3715-4764-8a76-21b06751782e req-a64e5d9e-29de-494b-995b-a254cd3e0ec5 service nova] Acquired lock "refresh_cache-20e445dd-663c-46e4-bc0a-f00e68ecd6cd" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 538.273926] env[61629]: DEBUG nova.network.neutron [req-f482c5d8-3715-4764-8a76-21b06751782e req-a64e5d9e-29de-494b-995b-a254cd3e0ec5 service nova] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Refreshing network info cache for port 4cedabf3-2049-4c0a-a3b3-f3fc62767ed6 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 538.275296] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d17abf86-65fd-4651-b108-bd59cbd7ca5f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.285098] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47789a6a-d5cf-4f70-a77b-8755ee548024 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.310015] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cfde84d1-ab91-4b86-ba48-2d8480afaf36 tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Lock "113fe8e6-bc12-41fe-a405-cec2aa1a717e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.576s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 538.310015] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 20e445dd-663c-46e4-bc0a-f00e68ecd6cd could not be found. [ 538.310015] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 538.310550] env[61629]: INFO nova.compute.manager [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Took 0.04 seconds to destroy the instance on the hypervisor. 
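[editor's note] The recurring "Acquiring lock … acquired … waited Ns … released … held Ns" DEBUG lines in this cleanup path come from oslo_concurrency.lockutils. The sketch below is a stdlib-only approximation of that waited/held bookkeeping, written to make the log lines easier to read; it is not the oslo.concurrency implementation, and the function and lock names are illustrative.

# Stdlib approximation (not oslo.concurrency) of the lock bookkeeping
# reported by lines such as:
#   Acquiring lock "compute_resources" by "...instance_claim"
#   Lock "compute_resources" acquired by "..." :: waited 5.477s
#   Lock "compute_resources" "released" by "..." :: held 2.300s
import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}


@contextmanager
def timed_lock(name: str, owner: str):
    lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{owner}"')
    t0 = time.monotonic()
    lock.acquire()                      # time spent here is "waited"
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()                  # time inside the block is "held"
        held = time.monotonic() - t1
        print(f'Lock "{name}" released by "{owner}" :: held {held:.3f}s')


# Usage mirroring the resource-claim pattern in this log: the instance
# claim and the later inventory update serialize on "compute_resources".
with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    time.sleep(0.01)  # stand-in for claiming CPU/RAM/disk on the node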
[ 538.310550] env[61629]: DEBUG oslo.service.loopingcall [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 538.311607] env[61629]: DEBUG nova.compute.manager [-] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 538.311646] env[61629]: DEBUG nova.network.neutron [-] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 538.423308] env[61629]: DEBUG nova.network.neutron [-] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 538.467603] env[61629]: DEBUG nova.network.neutron [req-a52bb08e-de79-4828-86ac-ba28bcb3a30d req-32cbb7aa-aea2-4a24-a0f8-a2337a6ce7f2 service nova] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 538.505974] env[61629]: DEBUG nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 538.511426] env[61629]: DEBUG nova.scheduler.client.report [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 538.517683] env[61629]: DEBUG nova.network.neutron [-] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.543561] env[61629]: DEBUG nova.virt.hardware [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 538.544541] env[61629]: DEBUG nova.virt.hardware [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 538.544541] env[61629]: DEBUG nova.virt.hardware [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 538.544541] env[61629]: DEBUG nova.virt.hardware [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 538.544541] env[61629]: DEBUG nova.virt.hardware [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 538.544541] env[61629]: DEBUG nova.virt.hardware [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 538.544787] env[61629]: DEBUG nova.virt.hardware [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 538.544831] env[61629]: DEBUG nova.virt.hardware [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 538.544954] env[61629]: DEBUG nova.virt.hardware [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 538.545479] env[61629]: DEBUG nova.virt.hardware [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 538.545680] env[61629]: DEBUG 
nova.virt.hardware [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 538.546894] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1999522f-ef8b-4c3a-984d-042d2fb5d1be {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.557772] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb32d615-57c3-4a1d-b6b0-f214cf21c422 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.695218] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 538.696114] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 538.696201] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Starting heal instance info cache {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 538.696295] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Rebuilding the list of instances to heal {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 538.817712] env[61629]: DEBUG nova.compute.manager [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 538.825859] env[61629]: DEBUG nova.network.neutron [req-f482c5d8-3715-4764-8a76-21b06751782e req-a64e5d9e-29de-494b-995b-a254cd3e0ec5 service nova] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 538.925274] env[61629]: DEBUG nova.network.neutron [-] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.976845] env[61629]: DEBUG nova.compute.manager [None req-2bf07479-e909-4bdb-9bb6-3ea26f7f2f3b tempest-ServerDiagnosticsV248Test-1901940416 tempest-ServerDiagnosticsV248Test-1901940416-project-admin] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 538.977638] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90211759-b52e-47d0-9a78-af5e108f125e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.986390] env[61629]: INFO nova.compute.manager [None req-2bf07479-e909-4bdb-9bb6-3ea26f7f2f3b tempest-ServerDiagnosticsV248Test-1901940416 tempest-ServerDiagnosticsV248Test-1901940416-project-admin] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Retrieving diagnostics [ 538.986390] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce9bb85-2e6b-441d-bd11-e531382c2808 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.021053] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.553s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 539.021394] env[61629]: DEBUG nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 539.025065] env[61629]: INFO nova.compute.manager [-] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Took 1.05 seconds to deallocate network for instance. 
[ 539.026462] env[61629]: DEBUG nova.network.neutron [req-a52bb08e-de79-4828-86ac-ba28bcb3a30d req-32cbb7aa-aea2-4a24-a0f8-a2337a6ce7f2 service nova] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 539.029915] env[61629]: DEBUG nova.network.neutron [req-f482c5d8-3715-4764-8a76-21b06751782e req-a64e5d9e-29de-494b-995b-a254cd3e0ec5 service nova] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 539.032722] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.611s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.032722] env[61629]: INFO nova.compute.claims [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 539.038323] env[61629]: DEBUG nova.compute.claims [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 539.038501] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 539.200795] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Skipping network cache update for instance because it is Building. {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 539.200795] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Skipping network cache update for instance because it is Building. {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 539.200933] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Skipping network cache update for instance because it is Building. {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 539.201034] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Skipping network cache update for instance because it is Building. 
{{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 539.295010] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Acquiring lock "refresh_cache-113fe8e6-bc12-41fe-a405-cec2aa1a717e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 539.295281] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Acquired lock "refresh_cache-113fe8e6-bc12-41fe-a405-cec2aa1a717e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 539.295331] env[61629]: DEBUG nova.network.neutron [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Forcefully refreshing network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 539.296979] env[61629]: DEBUG nova.objects.instance [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lazy-loading 'info_cache' on Instance uuid 113fe8e6-bc12-41fe-a405-cec2aa1a717e {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 539.345678] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 539.435929] env[61629]: INFO nova.compute.manager [-] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Took 1.12 seconds to deallocate network for instance. [ 539.437046] env[61629]: DEBUG nova.compute.claims [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 539.437254] env[61629]: DEBUG oslo_concurrency.lockutils [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 539.532415] env[61629]: DEBUG nova.compute.utils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 539.533815] env[61629]: DEBUG nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 539.533815] env[61629]: DEBUG nova.network.neutron [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 539.549995] env[61629]: DEBUG oslo_concurrency.lockutils [req-f482c5d8-3715-4764-8a76-21b06751782e req-a64e5d9e-29de-494b-995b-a254cd3e0ec5 service nova] Releasing lock "refresh_cache-20e445dd-663c-46e4-bc0a-f00e68ecd6cd" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 539.550533] env[61629]: DEBUG oslo_concurrency.lockutils [req-a52bb08e-de79-4828-86ac-ba28bcb3a30d req-32cbb7aa-aea2-4a24-a0f8-a2337a6ce7f2 service nova] Releasing lock "refresh_cache-8a8a3aa7-ec40-4a8a-a823-718025428a59" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 539.765334] env[61629]: DEBUG nova.policy [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5fe4bfac55cf40e79f7d54206afde0d6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d3e9a414c944234a52993d63b42e53f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 540.039589] env[61629]: DEBUG nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 540.270271] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a06f4937-80bd-43da-975a-3cf32611f84b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.276439] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf37df8e-fa6e-4ad6-b0f8-88890824733f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.310373] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c44326-b5d5-4e41-ad18-04493995a6cf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.315611] env[61629]: DEBUG nova.compute.manager [req-fd4a895e-a64d-4de6-96dc-74a9855c6bde req-154d039f-9a8b-4ad6-8855-a28c6ae7cc0b service nova] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Received event network-vif-deleted-5bb1633d-e41c-4ced-ab9f-e6019618b6f1 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 540.322881] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3f733d-3972-498b-99a3-79538e41334b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.328422] env[61629]: DEBUG nova.compute.manager [req-2ff5d5f4-5ea3-4c61-b5dc-69b8e3a180d5 req-90abf93d-6353-42bc-bae9-cb20b767c761 service nova] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Received event network-vif-deleted-4cedabf3-2049-4c0a-a3b3-f3fc62767ed6 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 540.338691] env[61629]: DEBUG nova.compute.provider_tree [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 540.366031] env[61629]: DEBUG nova.network.neutron [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 540.843444] env[61629]: DEBUG nova.scheduler.client.report [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 541.051434] env[61629]: DEBUG nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 541.092046] env[61629]: DEBUG nova.virt.hardware [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 541.093611] env[61629]: DEBUG nova.virt.hardware [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 541.093611] env[61629]: DEBUG nova.virt.hardware [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 541.093611] env[61629]: DEBUG nova.virt.hardware [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 541.093611] env[61629]: DEBUG nova.virt.hardware [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 541.093611] env[61629]: DEBUG 
nova.virt.hardware [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 541.093888] env[61629]: DEBUG nova.virt.hardware [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 541.093888] env[61629]: DEBUG nova.virt.hardware [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 541.093888] env[61629]: DEBUG nova.virt.hardware [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 541.093888] env[61629]: DEBUG nova.virt.hardware [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 541.094019] env[61629]: DEBUG nova.virt.hardware [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 541.095270] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa0776cc-cb6f-416e-896f-8cff91c0f276 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.103959] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0a5cc4-6805-463c-a0e1-54abe7478052 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.166219] env[61629]: DEBUG nova.network.neutron [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 541.348949] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.318s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 541.348949] env[61629]: DEBUG nova.compute.manager [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 
tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 541.352978] env[61629]: DEBUG oslo_concurrency.lockutils [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.238s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 541.358092] env[61629]: INFO nova.compute.claims [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 541.442961] env[61629]: DEBUG nova.network.neutron [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Successfully created port: 77a65997-5a5d-45e6-8056-38b717c5802e {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 541.672099] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Releasing lock "refresh_cache-113fe8e6-bc12-41fe-a405-cec2aa1a717e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 541.672417] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Updated the network info_cache for instance {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 541.672560] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 541.675022] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 541.675022] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 541.675022] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 541.675022] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 541.675022] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task 
ComputeManager._reclaim_queued_deletes {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 541.675022] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61629) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 541.675376] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager.update_available_resource {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 541.866261] env[61629]: DEBUG nova.compute.utils [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 541.869934] env[61629]: DEBUG nova.compute.manager [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 541.869934] env[61629]: DEBUG nova.network.neutron [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 542.153449] env[61629]: DEBUG nova.policy [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '340035faba0945958ed5e1a40d7a23b4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8cc8f5c2444b45e393767e4669f01a98', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 542.181491] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.373059] env[61629]: DEBUG nova.compute.manager [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 542.656783] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12ded1f-256a-4c27-8da9-2347782e16fd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.672541] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec9dda8-c7a7-4c06-aa72-a748263fc7d2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.726905] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a649868-728f-4a6e-9081-ba73c3fb3c6c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.732739] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "62d7c997-cd38-43f5-a571-78a055ad05f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.733074] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "62d7c997-cd38-43f5-a571-78a055ad05f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.738860] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ca5133-9c72-4ab9-aa83-f20bfb518d56 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.753155] env[61629]: DEBUG nova.compute.provider_tree [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 543.256803] env[61629]: DEBUG nova.scheduler.client.report [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 543.276975] env[61629]: ERROR nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 
9f30835a-0bc5-4945-8f94-dcc9db484a75, please check neutron logs for more information. [ 543.276975] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 543.276975] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 543.276975] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 543.276975] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 543.276975] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 543.276975] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 543.276975] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 543.276975] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 543.276975] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 543.276975] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 543.276975] env[61629]: ERROR nova.compute.manager raise self.value [ 543.276975] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 543.276975] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 543.276975] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 543.276975] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 543.277617] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 543.277617] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 543.277617] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9f30835a-0bc5-4945-8f94-dcc9db484a75, please check neutron logs for more information. 
[ 543.277617] env[61629]: ERROR nova.compute.manager [ 543.279523] env[61629]: Traceback (most recent call last): [ 543.279523] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 543.279523] env[61629]: listener.cb(fileno) [ 543.279523] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 543.279523] env[61629]: result = function(*args, **kwargs) [ 543.279523] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 543.279523] env[61629]: return func(*args, **kwargs) [ 543.279523] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 543.279523] env[61629]: raise e [ 543.279523] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 543.279523] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 543.279523] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 543.279523] env[61629]: created_port_ids = self._update_ports_for_instance( [ 543.279523] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 543.279523] env[61629]: with excutils.save_and_reraise_exception(): [ 543.279523] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 543.279523] env[61629]: self.force_reraise() [ 543.279523] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 543.279523] env[61629]: raise self.value [ 543.279523] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 543.279523] env[61629]: updated_port = self._update_port( [ 543.279523] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 543.279523] env[61629]: _ensure_no_port_binding_failure(port) [ 543.279523] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 543.279523] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 543.279523] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 9f30835a-0bc5-4945-8f94-dcc9db484a75, please check neutron logs for more information. [ 543.279523] env[61629]: Removing descriptor: 17 [ 543.280578] env[61629]: ERROR nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9f30835a-0bc5-4945-8f94-dcc9db484a75, please check neutron logs for more information. 
[ 543.280578] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Traceback (most recent call last): [ 543.280578] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 543.280578] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] yield resources [ 543.280578] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 543.280578] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] self.driver.spawn(context, instance, image_meta, [ 543.280578] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 543.280578] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] self._vmops.spawn(context, instance, image_meta, injected_files, [ 543.280578] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 543.280578] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] vm_ref = self.build_virtual_machine(instance, [ 543.280578] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 543.280917] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] vif_infos = vmwarevif.get_vif_info(self._session, [ 543.280917] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 543.280917] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] for vif in network_info: [ 543.280917] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 543.280917] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] return self._sync_wrapper(fn, *args, **kwargs) [ 543.280917] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 543.280917] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] self.wait() [ 543.280917] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 543.280917] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] self[:] = self._gt.wait() [ 543.280917] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 543.280917] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] return self._exit_event.wait() [ 543.280917] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 543.280917] env[61629]: ERROR 
nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] result = hub.switch() [ 543.282662] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 543.282662] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] return self.greenlet.switch() [ 543.282662] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 543.282662] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] result = function(*args, **kwargs) [ 543.282662] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 543.282662] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] return func(*args, **kwargs) [ 543.282662] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 543.282662] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] raise e [ 543.282662] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 543.282662] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] nwinfo = self.network_api.allocate_for_instance( [ 543.282662] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 543.282662] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] created_port_ids = self._update_ports_for_instance( [ 543.282662] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 543.283031] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] with excutils.save_and_reraise_exception(): [ 543.283031] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 543.283031] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] self.force_reraise() [ 543.283031] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 543.283031] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] raise self.value [ 543.283031] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 543.283031] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] updated_port = self._update_port( [ 543.283031] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 543.283031] 
env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] _ensure_no_port_binding_failure(port) [ 543.283031] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 543.283031] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] raise exception.PortBindingFailed(port_id=port['id']) [ 543.283031] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] nova.exception.PortBindingFailed: Binding failed for port 9f30835a-0bc5-4945-8f94-dcc9db484a75, please check neutron logs for more information. [ 543.283031] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] [ 543.283402] env[61629]: INFO nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Terminating instance [ 543.287611] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquiring lock "refresh_cache-abb87186-9951-4fbe-98b2-b595dd4fea12" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 543.287611] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquired lock "refresh_cache-abb87186-9951-4fbe-98b2-b595dd4fea12" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 543.287611] env[61629]: DEBUG nova.network.neutron [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 543.391948] env[61629]: DEBUG nova.compute.manager [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 543.419031] env[61629]: DEBUG nova.virt.hardware [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 543.419151] env[61629]: DEBUG nova.virt.hardware [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 543.419311] env[61629]: DEBUG nova.virt.hardware [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 543.419492] env[61629]: DEBUG nova.virt.hardware [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 543.419634] env[61629]: DEBUG nova.virt.hardware [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 543.419778] env[61629]: DEBUG nova.virt.hardware [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 543.419983] env[61629]: DEBUG nova.virt.hardware [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 543.420467] env[61629]: DEBUG nova.virt.hardware [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 543.420689] env[61629]: DEBUG nova.virt.hardware [None 
req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 543.420883] env[61629]: DEBUG nova.virt.hardware [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 543.421198] env[61629]: DEBUG nova.virt.hardware [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 543.422797] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d978816d-39b6-488d-9e18-90fb8ce2d060 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.433599] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa3e826b-baee-4394-98d8-1e9f75cefb52 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.763067] env[61629]: DEBUG oslo_concurrency.lockutils [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.409s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 543.763067] env[61629]: DEBUG nova.compute.manager [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 543.766435] env[61629]: DEBUG oslo_concurrency.lockutils [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.272s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.771355] env[61629]: INFO nova.compute.claims [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 543.857423] env[61629]: DEBUG nova.network.neutron [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 544.111398] env[61629]: DEBUG nova.network.neutron [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Successfully created port: 55b2c3cb-0b0e-4df8-a6df-314f7cf77caa {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 544.165662] env[61629]: DEBUG nova.network.neutron [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 544.267644] env[61629]: DEBUG nova.compute.utils [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 544.269098] env[61629]: DEBUG nova.compute.manager [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 544.269269] env[61629]: DEBUG nova.network.neutron [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 544.306106] env[61629]: DEBUG nova.compute.manager [req-6f70fbf5-20cf-4a46-864f-f57a49f714fc req-5ffaa021-1032-40a0-a9a4-023784a0dbd7 service nova] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Received event network-changed-9f30835a-0bc5-4945-8f94-dcc9db484a75 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 544.306106] env[61629]: DEBUG nova.compute.manager [req-6f70fbf5-20cf-4a46-864f-f57a49f714fc req-5ffaa021-1032-40a0-a9a4-023784a0dbd7 service nova] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Refreshing instance network info cache due to event network-changed-9f30835a-0bc5-4945-8f94-dcc9db484a75. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 544.306200] env[61629]: DEBUG oslo_concurrency.lockutils [req-6f70fbf5-20cf-4a46-864f-f57a49f714fc req-5ffaa021-1032-40a0-a9a4-023784a0dbd7 service nova] Acquiring lock "refresh_cache-abb87186-9951-4fbe-98b2-b595dd4fea12" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 544.406828] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Acquiring lock "8d858fe9-1c97-457b-87ba-2d405bb7dcc0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.407109] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Lock "8d858fe9-1c97-457b-87ba-2d405bb7dcc0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.448717] env[61629]: DEBUG nova.policy [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3cbfdc70fad64e8ab37fb9e0c1a10e0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bc538b7901b4d65a6107db047063183', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 544.670522] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Releasing lock "refresh_cache-abb87186-9951-4fbe-98b2-b595dd4fea12" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 544.670522] env[61629]: DEBUG nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 544.670522] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 544.670522] env[61629]: DEBUG oslo_concurrency.lockutils [req-6f70fbf5-20cf-4a46-864f-f57a49f714fc req-5ffaa021-1032-40a0-a9a4-023784a0dbd7 service nova] Acquired lock "refresh_cache-abb87186-9951-4fbe-98b2-b595dd4fea12" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 544.670522] env[61629]: DEBUG nova.network.neutron [req-6f70fbf5-20cf-4a46-864f-f57a49f714fc req-5ffaa021-1032-40a0-a9a4-023784a0dbd7 service nova] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Refreshing network info cache for port 9f30835a-0bc5-4945-8f94-dcc9db484a75 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 544.671870] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01a11a13-7249-4e2e-81be-63ce07166198 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.682407] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d47cc086-1ed4-4496-8595-346e0c436c60 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.709576] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance abb87186-9951-4fbe-98b2-b595dd4fea12 could not be found. [ 544.710661] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 544.710661] env[61629]: INFO nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Took 0.04 seconds to destroy the instance on the hypervisor. [ 544.710661] env[61629]: DEBUG oslo.service.loopingcall [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 544.710843] env[61629]: DEBUG nova.compute.manager [-] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 544.711009] env[61629]: DEBUG nova.network.neutron [-] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 544.763737] env[61629]: DEBUG nova.network.neutron [-] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 544.772283] env[61629]: DEBUG nova.compute.manager [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 545.132527] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac73dba-cd0e-4af4-b6db-04ddfa587672 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.140374] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e6fe6d9-f0d1-4ea3-b201-75d1f5c2f46a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.172668] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92786186-f3e2-4a51-8e9f-6b26c7cf4436 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.181125] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8039ba74-cf65-46d5-a967-7d74fbe149e7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.195035] env[61629]: DEBUG nova.compute.provider_tree [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 545.244994] env[61629]: DEBUG nova.network.neutron [req-6f70fbf5-20cf-4a46-864f-f57a49f714fc req-5ffaa021-1032-40a0-a9a4-023784a0dbd7 service nova] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 545.270202] env[61629]: DEBUG nova.network.neutron [-] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 545.701477] env[61629]: DEBUG nova.scheduler.client.report [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 545.772147] env[61629]: INFO nova.compute.manager [-] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Took 1.06 seconds to deallocate network for instance. [ 545.774547] env[61629]: DEBUG nova.compute.claims [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 545.774676] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.792624] env[61629]: DEBUG nova.compute.manager [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 545.829354] env[61629]: DEBUG nova.virt.hardware [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 545.834812] env[61629]: DEBUG nova.virt.hardware [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 545.834812] env[61629]: DEBUG nova.virt.hardware [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 545.834812] env[61629]: DEBUG nova.virt.hardware [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 545.834812] env[61629]: DEBUG nova.virt.hardware [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 545.834812] env[61629]: DEBUG nova.virt.hardware [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 545.834812] env[61629]: DEBUG nova.virt.hardware [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 545.835409] env[61629]: DEBUG nova.virt.hardware [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 545.835409] env[61629]: DEBUG nova.virt.hardware [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Got 1 possible 
topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 545.835409] env[61629]: DEBUG nova.virt.hardware [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 545.835409] env[61629]: DEBUG nova.virt.hardware [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 545.835409] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38bb2ea3-033f-4702-a214-88ab2f237b0c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.842861] env[61629]: ERROR nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 77a65997-5a5d-45e6-8056-38b717c5802e, please check neutron logs for more information. [ 545.842861] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 545.842861] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 545.842861] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 545.842861] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 545.842861] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 545.842861] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 545.842861] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 545.842861] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 545.842861] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 545.842861] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 545.842861] env[61629]: ERROR nova.compute.manager raise self.value [ 545.842861] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 545.842861] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 545.842861] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 545.842861] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 545.843327] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 545.843327] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 545.843327] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 
77a65997-5a5d-45e6-8056-38b717c5802e, please check neutron logs for more information. [ 545.843327] env[61629]: ERROR nova.compute.manager [ 545.843327] env[61629]: Traceback (most recent call last): [ 545.843327] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 545.843327] env[61629]: listener.cb(fileno) [ 545.843327] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 545.843327] env[61629]: result = function(*args, **kwargs) [ 545.843327] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 545.843327] env[61629]: return func(*args, **kwargs) [ 545.843327] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 545.843327] env[61629]: raise e [ 545.843327] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 545.843327] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 545.843327] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 545.843327] env[61629]: created_port_ids = self._update_ports_for_instance( [ 545.843327] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 545.843327] env[61629]: with excutils.save_and_reraise_exception(): [ 545.843327] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 545.843327] env[61629]: self.force_reraise() [ 545.843327] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 545.843327] env[61629]: raise self.value [ 545.843327] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 545.843327] env[61629]: updated_port = self._update_port( [ 545.843327] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 545.843327] env[61629]: _ensure_no_port_binding_failure(port) [ 545.843327] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 545.843327] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 545.844045] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 77a65997-5a5d-45e6-8056-38b717c5802e, please check neutron logs for more information. [ 545.844045] env[61629]: Removing descriptor: 15 [ 545.844791] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d16bf8-5f67-475c-885d-d0a613904564 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.849257] env[61629]: ERROR nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 77a65997-5a5d-45e6-8056-38b717c5802e, please check neutron logs for more information. 
[ 545.849257] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Traceback (most recent call last): [ 545.849257] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 545.849257] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] yield resources [ 545.849257] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 545.849257] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] self.driver.spawn(context, instance, image_meta, [ 545.849257] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 545.849257] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] self._vmops.spawn(context, instance, image_meta, injected_files, [ 545.849257] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 545.849257] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] vm_ref = self.build_virtual_machine(instance, [ 545.849257] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 545.849806] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] vif_infos = vmwarevif.get_vif_info(self._session, [ 545.849806] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 545.849806] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] for vif in network_info: [ 545.849806] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 545.849806] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] return self._sync_wrapper(fn, *args, **kwargs) [ 545.849806] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 545.849806] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] self.wait() [ 545.849806] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 545.849806] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] self[:] = self._gt.wait() [ 545.849806] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 545.849806] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] return self._exit_event.wait() [ 545.849806] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 545.849806] env[61629]: ERROR 
nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] result = hub.switch() [ 545.850352] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 545.850352] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] return self.greenlet.switch() [ 545.850352] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 545.850352] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] result = function(*args, **kwargs) [ 545.850352] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 545.850352] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] return func(*args, **kwargs) [ 545.850352] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 545.850352] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] raise e [ 545.850352] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 545.850352] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] nwinfo = self.network_api.allocate_for_instance( [ 545.850352] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 545.850352] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] created_port_ids = self._update_ports_for_instance( [ 545.850352] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 545.851465] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] with excutils.save_and_reraise_exception(): [ 545.851465] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 545.851465] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] self.force_reraise() [ 545.851465] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 545.851465] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] raise self.value [ 545.851465] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 545.851465] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] updated_port = self._update_port( [ 545.851465] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 545.851465] 
env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] _ensure_no_port_binding_failure(port) [ 545.851465] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 545.851465] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] raise exception.PortBindingFailed(port_id=port['id']) [ 545.851465] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] nova.exception.PortBindingFailed: Binding failed for port 77a65997-5a5d-45e6-8056-38b717c5802e, please check neutron logs for more information. [ 545.851465] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] [ 545.852361] env[61629]: INFO nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Terminating instance [ 545.852361] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquiring lock "refresh_cache-cf628773-7dcb-430a-b3ae-a5b62808e279" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 545.852426] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquired lock "refresh_cache-cf628773-7dcb-430a-b3ae-a5b62808e279" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 545.852603] env[61629]: DEBUG nova.network.neutron [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 545.912655] env[61629]: DEBUG nova.network.neutron [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 545.995503] env[61629]: DEBUG nova.network.neutron [req-6f70fbf5-20cf-4a46-864f-f57a49f714fc req-5ffaa021-1032-40a0-a9a4-023784a0dbd7 service nova] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.049927] env[61629]: DEBUG nova.network.neutron [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Successfully created port: 1ad574ca-30cb-485b-b57a-83736bdfbe6d {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 546.209239] env[61629]: DEBUG oslo_concurrency.lockutils [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.443s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.209775] env[61629]: DEBUG nova.compute.manager [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 546.212988] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.634s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.215781] env[61629]: INFO nova.compute.claims [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 546.307218] env[61629]: DEBUG nova.network.neutron [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.472775] env[61629]: DEBUG nova.compute.manager [req-d6c13c9e-8088-47f8-84f4-921af05e3f3b req-d5c3c48d-8561-4b41-80a5-314d12241c7c service nova] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Received event network-changed-77a65997-5a5d-45e6-8056-38b717c5802e {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 546.473561] env[61629]: DEBUG nova.compute.manager [req-d6c13c9e-8088-47f8-84f4-921af05e3f3b req-d5c3c48d-8561-4b41-80a5-314d12241c7c service nova] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Refreshing instance network info cache due to event network-changed-77a65997-5a5d-45e6-8056-38b717c5802e. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 546.473561] env[61629]: DEBUG oslo_concurrency.lockutils [req-d6c13c9e-8088-47f8-84f4-921af05e3f3b req-d5c3c48d-8561-4b41-80a5-314d12241c7c service nova] Acquiring lock "refresh_cache-cf628773-7dcb-430a-b3ae-a5b62808e279" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 546.502735] env[61629]: DEBUG oslo_concurrency.lockutils [req-6f70fbf5-20cf-4a46-864f-f57a49f714fc req-5ffaa021-1032-40a0-a9a4-023784a0dbd7 service nova] Releasing lock "refresh_cache-abb87186-9951-4fbe-98b2-b595dd4fea12" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.721710] env[61629]: DEBUG nova.compute.utils [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 546.724347] env[61629]: DEBUG nova.compute.manager [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 546.724485] env[61629]: DEBUG nova.network.neutron [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 546.811230] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Releasing lock "refresh_cache-cf628773-7dcb-430a-b3ae-a5b62808e279" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.811230] env[61629]: DEBUG nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 546.811230] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 546.811230] env[61629]: DEBUG oslo_concurrency.lockutils [req-d6c13c9e-8088-47f8-84f4-921af05e3f3b req-d5c3c48d-8561-4b41-80a5-314d12241c7c service nova] Acquired lock "refresh_cache-cf628773-7dcb-430a-b3ae-a5b62808e279" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 546.811370] env[61629]: DEBUG nova.network.neutron [req-d6c13c9e-8088-47f8-84f4-921af05e3f3b req-d5c3c48d-8561-4b41-80a5-314d12241c7c service nova] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Refreshing network info cache for port 77a65997-5a5d-45e6-8056-38b717c5802e {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 546.812355] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a7a91524-23d5-4c6b-993f-6504f7f974c9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.827407] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ac362c-e6fd-497e-8ddc-48dc18e0a852 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.854028] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cf628773-7dcb-430a-b3ae-a5b62808e279 could not be found. [ 546.854280] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 546.854946] env[61629]: INFO nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Took 0.04 seconds to destroy the instance on the hypervisor. [ 546.854946] env[61629]: DEBUG oslo.service.loopingcall [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 546.854946] env[61629]: DEBUG nova.compute.manager [-] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 546.855115] env[61629]: DEBUG nova.network.neutron [-] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 546.909821] env[61629]: DEBUG nova.network.neutron [-] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 546.972800] env[61629]: DEBUG nova.policy [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c16c3a3f8ec4df195b6b16b356fde15', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e6f6ddab35554ac1a839c7fad10aace3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 547.228100] env[61629]: DEBUG nova.compute.manager [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 547.417545] env[61629]: DEBUG nova.network.neutron [req-d6c13c9e-8088-47f8-84f4-921af05e3f3b req-d5c3c48d-8561-4b41-80a5-314d12241c7c service nova] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 547.417545] env[61629]: DEBUG nova.network.neutron [-] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.521012] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8226c3ac-ffda-4507-999f-e71f508b66ba {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.533776] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb844be-5a0b-4751-9045-a0b93955963e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.572291] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d3b137-ee58-421e-a179-22e3be09f053 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.576932] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0852a1f8-0a9a-4698-ada1-cd8c688aff5f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.592489] env[61629]: DEBUG nova.compute.provider_tree [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 547.919817] env[61629]: INFO nova.compute.manager [-] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Took 1.06 seconds to deallocate network for instance. 
[ 547.925818] env[61629]: DEBUG nova.compute.claims [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 547.926360] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.945930] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Acquiring lock "ce3a7a32-424a-48a4-b5c5-2a25190943f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.948789] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Lock "ce3a7a32-424a-48a4-b5c5-2a25190943f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.986879] env[61629]: DEBUG nova.network.neutron [req-d6c13c9e-8088-47f8-84f4-921af05e3f3b req-d5c3c48d-8561-4b41-80a5-314d12241c7c service nova] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.094927] env[61629]: DEBUG nova.scheduler.client.report [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 548.133384] env[61629]: DEBUG nova.network.neutron [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Successfully created port: 9f109052-c649-4c70-b87d-33df40b955c4 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 548.242696] env[61629]: DEBUG nova.compute.manager [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 548.271638] env[61629]: DEBUG nova.virt.hardware [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 548.271885] env[61629]: DEBUG nova.virt.hardware [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 548.275848] env[61629]: DEBUG nova.virt.hardware [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 548.275848] env[61629]: DEBUG nova.virt.hardware [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 548.275848] env[61629]: DEBUG nova.virt.hardware [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 548.275848] env[61629]: DEBUG nova.virt.hardware [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 548.275848] env[61629]: DEBUG nova.virt.hardware [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 548.276312] env[61629]: DEBUG nova.virt.hardware [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 548.276312] env[61629]: DEBUG nova.virt.hardware [None req-598db7da-b376-4194-8d83-cd7c88568f83 
tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 548.276312] env[61629]: DEBUG nova.virt.hardware [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 548.276312] env[61629]: DEBUG nova.virt.hardware [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 548.276312] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806a7086-b499-406f-be77-00f660fcfb65 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.285802] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6354f54-d2d2-446f-927a-7ad445fdc986 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.491573] env[61629]: DEBUG oslo_concurrency.lockutils [req-d6c13c9e-8088-47f8-84f4-921af05e3f3b req-d5c3c48d-8561-4b41-80a5-314d12241c7c service nova] Releasing lock "refresh_cache-cf628773-7dcb-430a-b3ae-a5b62808e279" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.529080] env[61629]: DEBUG nova.compute.manager [req-81fbc3ab-9a7e-472c-beba-207cccfdf708 req-05dea510-3a5c-4036-b855-0d4eeb6b30ba service nova] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Received event network-vif-deleted-9f30835a-0bc5-4945-8f94-dcc9db484a75 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 548.604531] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.391s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.604774] env[61629]: DEBUG nova.compute.manager [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 548.609289] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.935s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.611188] env[61629]: INFO nova.compute.claims [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 549.082419] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Acquiring lock "cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.082419] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Lock "cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.118783] env[61629]: DEBUG nova.compute.utils [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 549.120449] env[61629]: DEBUG nova.compute.manager [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 549.120449] env[61629]: DEBUG nova.network.neutron [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 549.263422] env[61629]: DEBUG nova.policy [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be81178f7a914988a54581c283e2e76a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6d1f876ee054beb89ca0eb0776ddcd5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 549.361020] env[61629]: ERROR nova.compute.manager [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 55b2c3cb-0b0e-4df8-a6df-314f7cf77caa, please check neutron logs for more information. [ 549.361020] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 549.361020] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 549.361020] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 549.361020] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 549.361020] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 549.361020] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 549.361020] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 549.361020] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.361020] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 549.361020] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.361020] env[61629]: ERROR nova.compute.manager raise self.value [ 549.361020] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 549.361020] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 549.361020] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.361020] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 549.361495] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.361495] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) 
[ 549.361495] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 55b2c3cb-0b0e-4df8-a6df-314f7cf77caa, please check neutron logs for more information. [ 549.361495] env[61629]: ERROR nova.compute.manager [ 549.361495] env[61629]: Traceback (most recent call last): [ 549.361495] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 549.361495] env[61629]: listener.cb(fileno) [ 549.361495] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 549.361495] env[61629]: result = function(*args, **kwargs) [ 549.361495] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 549.361495] env[61629]: return func(*args, **kwargs) [ 549.361495] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 549.361495] env[61629]: raise e [ 549.361495] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 549.361495] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 549.361495] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 549.361495] env[61629]: created_port_ids = self._update_ports_for_instance( [ 549.361495] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 549.361495] env[61629]: with excutils.save_and_reraise_exception(): [ 549.361495] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.361495] env[61629]: self.force_reraise() [ 549.361495] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.361495] env[61629]: raise self.value [ 549.361495] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 549.361495] env[61629]: updated_port = self._update_port( [ 549.361495] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.361495] env[61629]: _ensure_no_port_binding_failure(port) [ 549.361495] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.361495] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 549.362198] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 55b2c3cb-0b0e-4df8-a6df-314f7cf77caa, please check neutron logs for more information. [ 549.362198] env[61629]: Removing descriptor: 18 [ 549.362198] env[61629]: ERROR nova.compute.manager [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 55b2c3cb-0b0e-4df8-a6df-314f7cf77caa, please check neutron logs for more information. 
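The traceback terminates in nova.network.neutron._ensure_no_port_binding_failure raising PortBindingFailed for port 55b2c3cb. A simplified sketch of that check, assuming the usual Neutron port dict shape (this is an illustrative reduction, not the exact Nova source):

# Simplified sketch of the check that produces the PortBindingFailed
# errors in this log: after updating a port, the port's binding is
# inspected and an exception is raised if Neutron reported a failed
# binding. Illustrative only.
from nova import exception

def ensure_no_port_binding_failure(port):
    if port.get('binding:vif_type') == 'binding_failed':
        raise exception.PortBindingFailed(port_id=port['id'])

# Failure path corresponding to the log above (values copied from it):
# ensure_no_port_binding_failure(
#     {'id': '55b2c3cb-0b0e-4df8-a6df-314f7cf77caa',
#      'binding:vif_type': 'binding_failed'})

As the message says, the root cause of a failed binding lives on the Neutron side (no mechanism driver could bind the port on this host), so the neutron agent/server logs are the place to look next.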
[ 549.362198] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Traceback (most recent call last): [ 549.362198] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 549.362198] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] yield resources [ 549.362198] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 549.362198] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] self.driver.spawn(context, instance, image_meta, [ 549.362198] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 549.362198] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 549.362198] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 549.362198] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] vm_ref = self.build_virtual_machine(instance, [ 549.362494] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 549.362494] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] vif_infos = vmwarevif.get_vif_info(self._session, [ 549.362494] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 549.362494] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] for vif in network_info: [ 549.362494] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 549.362494] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] return self._sync_wrapper(fn, *args, **kwargs) [ 549.362494] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 549.362494] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] self.wait() [ 549.362494] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 549.362494] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] self[:] = self._gt.wait() [ 549.362494] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 549.362494] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] return self._exit_event.wait() [ 549.362494] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 549.362868] env[61629]: ERROR 
nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] result = hub.switch() [ 549.362868] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 549.362868] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] return self.greenlet.switch() [ 549.362868] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 549.362868] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] result = function(*args, **kwargs) [ 549.362868] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 549.362868] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] return func(*args, **kwargs) [ 549.362868] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 549.362868] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] raise e [ 549.362868] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 549.362868] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] nwinfo = self.network_api.allocate_for_instance( [ 549.362868] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 549.362868] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] created_port_ids = self._update_ports_for_instance( [ 549.363191] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 549.363191] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] with excutils.save_and_reraise_exception(): [ 549.363191] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.363191] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] self.force_reraise() [ 549.363191] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.363191] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] raise self.value [ 549.363191] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 549.363191] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] updated_port = self._update_port( [ 549.363191] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.363191] 
env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] _ensure_no_port_binding_failure(port) [ 549.363191] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.363191] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] raise exception.PortBindingFailed(port_id=port['id']) [ 549.363487] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] nova.exception.PortBindingFailed: Binding failed for port 55b2c3cb-0b0e-4df8-a6df-314f7cf77caa, please check neutron logs for more information. [ 549.363487] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] [ 549.363487] env[61629]: INFO nova.compute.manager [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Terminating instance [ 549.366536] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Acquiring lock "refresh_cache-bfbff392-0dc0-47c7-ae58-22d922638ac8" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 549.366536] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Acquired lock "refresh_cache-bfbff392-0dc0-47c7-ae58-22d922638ac8" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.366536] env[61629]: DEBUG nova.network.neutron [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 549.629400] env[61629]: DEBUG nova.compute.manager [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 549.810497] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Acquiring lock "e40e1443-6d5d-41e1-9822-08b782e39d27" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.811038] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Lock "e40e1443-6d5d-41e1-9822-08b782e39d27" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.923624] env[61629]: DEBUG nova.network.neutron [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 550.013892] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8514fa6-aaa9-440a-9248-a07cce41b224 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.025180] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e4a3034-55b3-4590-8351-5bc083b06ba0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.056338] env[61629]: DEBUG nova.network.neutron [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.058452] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d94ddb55-7e0c-470b-8a10-a05d4e7a76e8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.068540] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b398d437-cbe1-4d5e-88ea-a9f125914016 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.083650] env[61629]: DEBUG nova.compute.provider_tree [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 550.134164] env[61629]: DEBUG nova.network.neutron [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Successfully created 
port: f4597226-4835-4be9-97ae-427377d35bcf {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 550.304061] env[61629]: DEBUG nova.compute.manager [req-52a932a6-a889-408c-a72e-00e30b221d7b req-5708522b-a3e2-4d56-9302-d26c45388b58 service nova] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Received event network-vif-deleted-77a65997-5a5d-45e6-8056-38b717c5802e {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 550.321194] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Acquiring lock "01c864cd-58a3-4061-836d-6a86ad37e4c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.322107] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Lock "01c864cd-58a3-4061-836d-6a86ad37e4c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.433325] env[61629]: ERROR nova.compute.manager [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1ad574ca-30cb-485b-b57a-83736bdfbe6d, please check neutron logs for more information. 
[ 550.433325] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 550.433325] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 550.433325] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 550.433325] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 550.433325] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 550.433325] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 550.433325] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 550.433325] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 550.433325] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 550.433325] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 550.433325] env[61629]: ERROR nova.compute.manager raise self.value [ 550.433325] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 550.433325] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 550.433325] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 550.433325] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 550.434023] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 550.434023] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 550.434023] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1ad574ca-30cb-485b-b57a-83736bdfbe6d, please check neutron logs for more information. 
[ 550.434023] env[61629]: ERROR nova.compute.manager [ 550.434023] env[61629]: Traceback (most recent call last): [ 550.434023] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 550.434023] env[61629]: listener.cb(fileno) [ 550.434023] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 550.434023] env[61629]: result = function(*args, **kwargs) [ 550.434023] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 550.434023] env[61629]: return func(*args, **kwargs) [ 550.434023] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 550.434023] env[61629]: raise e [ 550.434023] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 550.434023] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 550.434023] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 550.434023] env[61629]: created_port_ids = self._update_ports_for_instance( [ 550.434023] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 550.434023] env[61629]: with excutils.save_and_reraise_exception(): [ 550.434023] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 550.434023] env[61629]: self.force_reraise() [ 550.434023] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 550.434023] env[61629]: raise self.value [ 550.434023] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 550.434023] env[61629]: updated_port = self._update_port( [ 550.434023] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 550.434023] env[61629]: _ensure_no_port_binding_failure(port) [ 550.434023] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 550.434023] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 550.435376] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 1ad574ca-30cb-485b-b57a-83736bdfbe6d, please check neutron logs for more information. [ 550.435376] env[61629]: Removing descriptor: 17 [ 550.435376] env[61629]: ERROR nova.compute.manager [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1ad574ca-30cb-485b-b57a-83736bdfbe6d, please check neutron logs for more information. 
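Every traceback in this section passes through oslo_utils.excutils.save_and_reraise_exception() before the original PortBindingFailed is re-raised via force_reraise(). That context manager lets cleanup run while preserving the original exception; a minimal, self-contained example of the pattern (illustrative, not Nova code):

# Minimal illustration of the save_and_reraise_exception() pattern seen
# in the tracebacks above: cleanup runs on failure, then the original
# exception is re-raised unchanged for the caller. Not Nova code.
from oslo_utils import excutils

def update_ports():
    try:
        raise ValueError('binding failed')  # stand-in for the Neutron error
    except Exception:
        with excutils.save_and_reraise_exception():
            # Cleanup (e.g. deleting half-created ports) happens here;
            # on exit the original ValueError is re-raised.
            print('rolling back partially created ports')

# Calling update_ports() prints the rollback message and still raises
# ValueError, which is the behaviour behind the force_reraise frames above.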
[ 550.435376] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Traceback (most recent call last): [ 550.435376] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 550.435376] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] yield resources [ 550.435376] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 550.435376] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] self.driver.spawn(context, instance, image_meta, [ 550.435376] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 550.435376] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] self._vmops.spawn(context, instance, image_meta, injected_files, [ 550.435376] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 550.435376] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] vm_ref = self.build_virtual_machine(instance, [ 550.436483] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 550.436483] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] vif_infos = vmwarevif.get_vif_info(self._session, [ 550.436483] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 550.436483] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] for vif in network_info: [ 550.436483] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 550.436483] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] return self._sync_wrapper(fn, *args, **kwargs) [ 550.436483] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 550.436483] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] self.wait() [ 550.436483] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 550.436483] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] self[:] = self._gt.wait() [ 550.436483] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 550.436483] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] return self._exit_event.wait() [ 550.436483] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 550.437278] env[61629]: ERROR 
nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] result = hub.switch() [ 550.437278] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 550.437278] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] return self.greenlet.switch() [ 550.437278] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 550.437278] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] result = function(*args, **kwargs) [ 550.437278] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 550.437278] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] return func(*args, **kwargs) [ 550.437278] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 550.437278] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] raise e [ 550.437278] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 550.437278] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] nwinfo = self.network_api.allocate_for_instance( [ 550.437278] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 550.437278] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] created_port_ids = self._update_ports_for_instance( [ 550.438065] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 550.438065] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] with excutils.save_and_reraise_exception(): [ 550.438065] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 550.438065] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] self.force_reraise() [ 550.438065] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 550.438065] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] raise self.value [ 550.438065] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 550.438065] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] updated_port = self._update_port( [ 550.438065] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 550.438065] 
env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] _ensure_no_port_binding_failure(port) [ 550.438065] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 550.438065] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] raise exception.PortBindingFailed(port_id=port['id']) [ 550.438687] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] nova.exception.PortBindingFailed: Binding failed for port 1ad574ca-30cb-485b-b57a-83736bdfbe6d, please check neutron logs for more information. [ 550.438687] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] [ 550.438687] env[61629]: INFO nova.compute.manager [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Terminating instance [ 550.438687] env[61629]: DEBUG oslo_concurrency.lockutils [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "refresh_cache-d29660cc-47f8-4ca5-b21f-bcfd945abc03" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.438687] env[61629]: DEBUG oslo_concurrency.lockutils [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquired lock "refresh_cache-d29660cc-47f8-4ca5-b21f-bcfd945abc03" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.438687] env[61629]: DEBUG nova.network.neutron [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 550.559342] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Releasing lock "refresh_cache-bfbff392-0dc0-47c7-ae58-22d922638ac8" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.559855] env[61629]: DEBUG nova.compute.manager [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 550.559927] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 550.560285] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8ce57dcd-cdfd-45da-8ada-34531f133526 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.574483] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714a8f26-bea5-40a6-a7c1-6f1e15e0f6c5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.590791] env[61629]: DEBUG nova.scheduler.client.report [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 550.605626] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bfbff392-0dc0-47c7-ae58-22d922638ac8 could not be found. [ 550.605887] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 550.606086] env[61629]: INFO nova.compute.manager [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Took 0.05 seconds to destroy the instance on the hypervisor. [ 550.606403] env[61629]: DEBUG oslo.service.loopingcall [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
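The inventory record reported above (VCPU total=48, allocation_ratio=4.0; MEMORY_MB total=196590, reserved=512; DISK_GB total=400) is what the scheduler report client compares against Placement before deciding nothing changed. Usable capacity per resource class follows the usual Placement formula, roughly (total - reserved) * allocation_ratio; a small worked example with the figures from this log (a sketch, not the Placement implementation):

# Worked example of the capacity formula applied to the inventory above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    print(rc, capacity)
# VCPU 192, MEMORY_MB 196078, DISK_GB 400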
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 550.607089] env[61629]: DEBUG nova.compute.manager [-] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 550.607089] env[61629]: DEBUG nova.network.neutron [-] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 550.641344] env[61629]: DEBUG nova.network.neutron [-] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 550.643724] env[61629]: DEBUG nova.compute.manager [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 550.684380] env[61629]: DEBUG nova.virt.hardware [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 550.684750] env[61629]: DEBUG nova.virt.hardware [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 550.684844] env[61629]: DEBUG nova.virt.hardware [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 550.684950] env[61629]: DEBUG nova.virt.hardware [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 550.685108] env[61629]: DEBUG nova.virt.hardware [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 550.685256] env[61629]: DEBUG nova.virt.hardware [None 
req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 550.685508] env[61629]: DEBUG nova.virt.hardware [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 550.685665] env[61629]: DEBUG nova.virt.hardware [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 550.685828] env[61629]: DEBUG nova.virt.hardware [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 550.685982] env[61629]: DEBUG nova.virt.hardware [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 550.686166] env[61629]: DEBUG nova.virt.hardware [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 550.687114] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190fd2e3-b4b3-46bf-8056-51e4cb3802f7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.696925] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-350187e2-a290-4064-a513-357856478112 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.962865] env[61629]: DEBUG nova.network.neutron [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Instance cache missing network info. 
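The nova.virt.hardware lines above walk from the flavor/image limits (sockets=65536, cores=65536, threads=65536 when nothing constrains them) down to a single candidate topology for the 1-vCPU m1.nano flavor. A toy re-implementation of that enumeration step, assuming the "every factorisation of the vCPU count within the limits" rule the log describes (illustrative; nova.virt.hardware adds preference and sorting logic not reproduced here):

# Toy enumeration of possible CPU topologies for a vCPU count, mirroring
# the "Build topologies for 1 vcpu(s)" / "Got 1 possible topologies" lines.
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append(VirtCPUTopology(sockets, cores, threads))
    return found

print(possible_topologies(1))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)] -- the single
# candidate reported in the log above.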
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 551.051135] env[61629]: DEBUG nova.compute.manager [None req-0033f945-c9f2-44fc-b778-b250c50756f6 tempest-ServerDiagnosticsV248Test-1901940416 tempest-ServerDiagnosticsV248Test-1901940416-project-admin] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 551.052515] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca83a91f-4094-4ea1-a40f-80c3ec3e96e1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.063381] env[61629]: INFO nova.compute.manager [None req-0033f945-c9f2-44fc-b778-b250c50756f6 tempest-ServerDiagnosticsV248Test-1901940416 tempest-ServerDiagnosticsV248Test-1901940416-project-admin] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Retrieving diagnostics [ 551.064215] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22748acf-c948-4450-8e9c-35634305ecb6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.096702] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.488s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.098148] env[61629]: DEBUG nova.compute.manager [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 551.101119] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.063s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.126215] env[61629]: DEBUG nova.network.neutron [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.147983] env[61629]: DEBUG nova.network.neutron [-] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.603189] env[61629]: DEBUG nova.compute.utils [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 551.606929] env[61629]: DEBUG nova.compute.manager [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Not allocating networking since 'none' was specified. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 551.632891] env[61629]: DEBUG oslo_concurrency.lockutils [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Releasing lock "refresh_cache-d29660cc-47f8-4ca5-b21f-bcfd945abc03" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.632891] env[61629]: DEBUG nova.compute.manager [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 551.632891] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 551.633840] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1c6c10b8-e481-4929-85e8-27ecb6f89ff7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.650385] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5551de01-d7fe-4487-945f-2949131e4eb8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.664102] env[61629]: INFO nova.compute.manager [-] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Took 1.06 seconds to deallocate network for instance. [ 551.671255] env[61629]: DEBUG nova.compute.claims [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 551.671697] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.682493] env[61629]: DEBUG oslo_concurrency.lockutils [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Acquiring lock "9b950dc9-d79c-4b30-8b71-1910b46ffd9b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.683444] env[61629]: DEBUG oslo_concurrency.lockutils [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Lock "9b950dc9-d79c-4b30-8b71-1910b46ffd9b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.684316] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d29660cc-47f8-4ca5-b21f-bcfd945abc03 could not be found. 
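The WARNING just above ("Instance does not exist on backend: ... InstanceNotFound") followed immediately by "Instance destroyed" reflects the destroy path treating a missing backend VM as already gone, so network and claim teardown can continue. A hedged sketch of that tolerance pattern; the callable name is a hypothetical placeholder, not the vmops source:

# Sketch of the "missing VM counts as already destroyed" tolerance seen
# in the log: InstanceNotFound from the backend lookup is downgraded to a
# warning and teardown proceeds. Illustrative only.
import logging

from nova import exception

LOG = logging.getLogger(__name__)

def destroy_vm(lookup_vm_ref, instance_uuid):
    """lookup_vm_ref is a hypothetical callable that finds the backend VM."""
    try:
        vm_ref = lookup_vm_ref(instance_uuid)
    except exception.InstanceNotFound as err:
        LOG.warning('Instance does not exist on backend: %s', err)
        return  # nothing to tear down; caller still deallocates network/claims
    # ... power off and unregister vm_ref here ...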
[ 551.684734] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 551.685159] env[61629]: INFO nova.compute.manager [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Took 0.05 seconds to destroy the instance on the hypervisor. [ 551.686024] env[61629]: DEBUG oslo.service.loopingcall [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 551.686024] env[61629]: DEBUG nova.compute.manager [-] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 551.686367] env[61629]: DEBUG nova.network.neutron [-] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 551.717777] env[61629]: DEBUG nova.network.neutron [-] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 551.942486] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a37970-89b9-453c-b7bc-a54d677f4409 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.951757] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-913eb0b5-a74e-4cb8-a3bc-3d9de83ae2da {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.984633] env[61629]: ERROR nova.compute.manager [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f4597226-4835-4be9-97ae-427377d35bcf, please check neutron logs for more information. 
[ 551.984633] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 551.984633] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 551.984633] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 551.984633] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 551.984633] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 551.984633] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 551.984633] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 551.984633] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 551.984633] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 551.984633] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 551.984633] env[61629]: ERROR nova.compute.manager raise self.value [ 551.984633] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 551.984633] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 551.984633] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 551.984633] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 551.985165] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 551.985165] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 551.985165] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f4597226-4835-4be9-97ae-427377d35bcf, please check neutron logs for more information. 
[ 551.985165] env[61629]: ERROR nova.compute.manager [ 551.985165] env[61629]: Traceback (most recent call last): [ 551.985165] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 551.985165] env[61629]: listener.cb(fileno) [ 551.985165] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 551.985165] env[61629]: result = function(*args, **kwargs) [ 551.985165] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 551.985165] env[61629]: return func(*args, **kwargs) [ 551.985165] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 551.985165] env[61629]: raise e [ 551.985165] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 551.985165] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 551.985165] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 551.985165] env[61629]: created_port_ids = self._update_ports_for_instance( [ 551.985165] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 551.985165] env[61629]: with excutils.save_and_reraise_exception(): [ 551.985165] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 551.985165] env[61629]: self.force_reraise() [ 551.985165] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 551.985165] env[61629]: raise self.value [ 551.985165] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 551.985165] env[61629]: updated_port = self._update_port( [ 551.985165] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 551.985165] env[61629]: _ensure_no_port_binding_failure(port) [ 551.985165] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 551.985165] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 551.985929] env[61629]: nova.exception.PortBindingFailed: Binding failed for port f4597226-4835-4be9-97ae-427377d35bcf, please check neutron logs for more information. [ 551.985929] env[61629]: Removing descriptor: 21 [ 551.985929] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16306f1a-cfcb-4e7c-ae7f-3bc598b2240d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.989325] env[61629]: ERROR nova.compute.manager [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f4597226-4835-4be9-97ae-427377d35bcf, please check neutron logs for more information. 
[ 551.989325] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Traceback (most recent call last): [ 551.989325] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 551.989325] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] yield resources [ 551.989325] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 551.989325] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] self.driver.spawn(context, instance, image_meta, [ 551.989325] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 551.989325] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 551.989325] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 551.989325] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] vm_ref = self.build_virtual_machine(instance, [ 551.989325] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 551.989662] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] vif_infos = vmwarevif.get_vif_info(self._session, [ 551.989662] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 551.989662] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] for vif in network_info: [ 551.989662] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 551.989662] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] return self._sync_wrapper(fn, *args, **kwargs) [ 551.989662] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 551.989662] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] self.wait() [ 551.989662] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 551.989662] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] self[:] = self._gt.wait() [ 551.989662] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 551.989662] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] return self._exit_event.wait() [ 551.989662] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 551.989662] env[61629]: ERROR 
nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] result = hub.switch() [ 551.989977] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 551.989977] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] return self.greenlet.switch() [ 551.989977] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 551.989977] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] result = function(*args, **kwargs) [ 551.989977] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 551.989977] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] return func(*args, **kwargs) [ 551.989977] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 551.989977] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] raise e [ 551.989977] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 551.989977] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] nwinfo = self.network_api.allocate_for_instance( [ 551.989977] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 551.989977] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] created_port_ids = self._update_ports_for_instance( [ 551.989977] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 551.990320] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] with excutils.save_and_reraise_exception(): [ 551.990320] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 551.990320] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] self.force_reraise() [ 551.990320] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 551.990320] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] raise self.value [ 551.990320] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 551.990320] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] updated_port = self._update_port( [ 551.990320] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 551.990320] 
env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] _ensure_no_port_binding_failure(port) [ 551.990320] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 551.990320] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] raise exception.PortBindingFailed(port_id=port['id']) [ 551.990320] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] nova.exception.PortBindingFailed: Binding failed for port f4597226-4835-4be9-97ae-427377d35bcf, please check neutron logs for more information. [ 551.990320] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] [ 551.990638] env[61629]: INFO nova.compute.manager [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Terminating instance [ 551.993076] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "refresh_cache-1a756eed-d5f2-4135-b522-ed06e20da1bc" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.993234] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquired lock "refresh_cache-1a756eed-d5f2-4135-b522-ed06e20da1bc" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 551.993397] env[61629]: DEBUG nova.network.neutron [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 551.997907] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce1277b-1b54-46f4-b9bd-c6fd1c747b53 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.017104] env[61629]: DEBUG nova.compute.provider_tree [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 552.104984] env[61629]: DEBUG nova.compute.manager [req-71001404-3c40-4426-96fe-7922c564bb24 req-c252b469-b150-4d59-a968-6b301168d7e3 service nova] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Received event network-changed-55b2c3cb-0b0e-4df8-a6df-314f7cf77caa {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 552.104984] env[61629]: DEBUG nova.compute.manager [req-71001404-3c40-4426-96fe-7922c564bb24 req-c252b469-b150-4d59-a968-6b301168d7e3 service nova] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Refreshing instance network info cache due to event 
network-changed-55b2c3cb-0b0e-4df8-a6df-314f7cf77caa. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 552.104984] env[61629]: DEBUG oslo_concurrency.lockutils [req-71001404-3c40-4426-96fe-7922c564bb24 req-c252b469-b150-4d59-a968-6b301168d7e3 service nova] Acquiring lock "refresh_cache-bfbff392-0dc0-47c7-ae58-22d922638ac8" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.104984] env[61629]: DEBUG oslo_concurrency.lockutils [req-71001404-3c40-4426-96fe-7922c564bb24 req-c252b469-b150-4d59-a968-6b301168d7e3 service nova] Acquired lock "refresh_cache-bfbff392-0dc0-47c7-ae58-22d922638ac8" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.104984] env[61629]: DEBUG nova.network.neutron [req-71001404-3c40-4426-96fe-7922c564bb24 req-c252b469-b150-4d59-a968-6b301168d7e3 service nova] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Refreshing network info cache for port 55b2c3cb-0b0e-4df8-a6df-314f7cf77caa {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 552.112084] env[61629]: DEBUG nova.compute.manager [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 552.148333] env[61629]: ERROR nova.compute.manager [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9f109052-c649-4c70-b87d-33df40b955c4, please check neutron logs for more information. 
[ 552.148333] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 552.148333] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 552.148333] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 552.148333] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 552.148333] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 552.148333] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 552.148333] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 552.148333] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.148333] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 552.148333] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.148333] env[61629]: ERROR nova.compute.manager raise self.value [ 552.148333] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 552.148333] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 552.148333] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.148333] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 552.149049] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.149049] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 552.149049] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9f109052-c649-4c70-b87d-33df40b955c4, please check neutron logs for more information. 
[ 552.149049] env[61629]: ERROR nova.compute.manager [ 552.149049] env[61629]: Traceback (most recent call last): [ 552.149049] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 552.149049] env[61629]: listener.cb(fileno) [ 552.149049] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 552.149049] env[61629]: result = function(*args, **kwargs) [ 552.149049] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 552.149049] env[61629]: return func(*args, **kwargs) [ 552.149049] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 552.149049] env[61629]: raise e [ 552.149049] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 552.149049] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 552.149049] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 552.149049] env[61629]: created_port_ids = self._update_ports_for_instance( [ 552.149049] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 552.149049] env[61629]: with excutils.save_and_reraise_exception(): [ 552.149049] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.149049] env[61629]: self.force_reraise() [ 552.149049] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.149049] env[61629]: raise self.value [ 552.149049] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 552.149049] env[61629]: updated_port = self._update_port( [ 552.149049] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.149049] env[61629]: _ensure_no_port_binding_failure(port) [ 552.149049] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.149049] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 552.149832] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 9f109052-c649-4c70-b87d-33df40b955c4, please check neutron logs for more information. [ 552.149832] env[61629]: Removing descriptor: 15 [ 552.149832] env[61629]: ERROR nova.compute.manager [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9f109052-c649-4c70-b87d-33df40b955c4, please check neutron logs for more information. 
[ 552.149832] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Traceback (most recent call last): [ 552.149832] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 552.149832] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] yield resources [ 552.149832] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 552.149832] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] self.driver.spawn(context, instance, image_meta, [ 552.149832] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 552.149832] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] self._vmops.spawn(context, instance, image_meta, injected_files, [ 552.149832] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 552.149832] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] vm_ref = self.build_virtual_machine(instance, [ 552.152129] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 552.152129] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] vif_infos = vmwarevif.get_vif_info(self._session, [ 552.152129] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 552.152129] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] for vif in network_info: [ 552.152129] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 552.152129] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] return self._sync_wrapper(fn, *args, **kwargs) [ 552.152129] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 552.152129] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] self.wait() [ 552.152129] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 552.152129] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] self[:] = self._gt.wait() [ 552.152129] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 552.152129] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] return self._exit_event.wait() [ 552.152129] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 552.152544] env[61629]: ERROR 
nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] result = hub.switch() [ 552.152544] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 552.152544] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] return self.greenlet.switch() [ 552.152544] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 552.152544] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] result = function(*args, **kwargs) [ 552.152544] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 552.152544] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] return func(*args, **kwargs) [ 552.152544] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 552.152544] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] raise e [ 552.152544] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 552.152544] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] nwinfo = self.network_api.allocate_for_instance( [ 552.152544] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 552.152544] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] created_port_ids = self._update_ports_for_instance( [ 552.152986] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 552.152986] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] with excutils.save_and_reraise_exception(): [ 552.152986] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.152986] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] self.force_reraise() [ 552.152986] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.152986] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] raise self.value [ 552.152986] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 552.152986] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] updated_port = self._update_port( [ 552.152986] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.152986] 
env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] _ensure_no_port_binding_failure(port) [ 552.152986] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.152986] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] raise exception.PortBindingFailed(port_id=port['id']) [ 552.153312] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] nova.exception.PortBindingFailed: Binding failed for port 9f109052-c649-4c70-b87d-33df40b955c4, please check neutron logs for more information. [ 552.153312] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] [ 552.153312] env[61629]: INFO nova.compute.manager [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Terminating instance [ 552.156351] env[61629]: DEBUG oslo_concurrency.lockutils [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Acquiring lock "refresh_cache-f128e0a7-f67b-4800-bfd6-ec65c5042460" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.156519] env[61629]: DEBUG oslo_concurrency.lockutils [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Acquired lock "refresh_cache-f128e0a7-f67b-4800-bfd6-ec65c5042460" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.156833] env[61629]: DEBUG nova.network.neutron [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 552.221306] env[61629]: DEBUG nova.network.neutron [-] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.520177] env[61629]: DEBUG nova.scheduler.client.report [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 552.524630] env[61629]: DEBUG nova.network.neutron [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 552.628041] env[61629]: DEBUG nova.network.neutron [req-71001404-3c40-4426-96fe-7922c564bb24 req-c252b469-b150-4d59-a968-6b301168d7e3 service nova] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 552.663239] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Acquiring lock "b2ec37a4-09f6-428c-bca9-1ec121c9c390" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.665041] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Lock "b2ec37a4-09f6-428c-bca9-1ec121c9c390" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.690659] env[61629]: DEBUG nova.network.neutron [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 552.727728] env[61629]: INFO nova.compute.manager [-] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Took 1.04 seconds to deallocate network for instance.
[ 552.728812] env[61629]: DEBUG nova.compute.claims [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 552.729057] env[61629]: DEBUG oslo_concurrency.lockutils [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.732048] env[61629]: DEBUG nova.network.neutron [req-71001404-3c40-4426-96fe-7922c564bb24 req-c252b469-b150-4d59-a968-6b301168d7e3 service nova] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.754395] env[61629]: DEBUG nova.network.neutron [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.764943] env[61629]: DEBUG nova.network.neutron [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.029270] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.928s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.030626] env[61629]: ERROR nova.compute.manager [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5bb1633d-e41c-4ced-ab9f-e6019618b6f1, please check neutron logs for more information. 
[ 553.030626] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Traceback (most recent call last): [ 553.030626] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 553.030626] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] self.driver.spawn(context, instance, image_meta, [ 553.030626] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 553.030626] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] self._vmops.spawn(context, instance, image_meta, injected_files, [ 553.030626] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 553.030626] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] vm_ref = self.build_virtual_machine(instance, [ 553.030626] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 553.030626] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] vif_infos = vmwarevif.get_vif_info(self._session, [ 553.030626] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 553.030948] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] for vif in network_info: [ 553.030948] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 553.030948] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] return self._sync_wrapper(fn, *args, **kwargs) [ 553.030948] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 553.030948] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] self.wait() [ 553.030948] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 553.030948] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] self[:] = self._gt.wait() [ 553.030948] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 553.030948] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] return self._exit_event.wait() [ 553.030948] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 553.030948] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] result = hub.switch() [ 553.030948] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
553.030948] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] return self.greenlet.switch() [ 553.031307] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 553.031307] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] result = function(*args, **kwargs) [ 553.031307] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 553.031307] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] return func(*args, **kwargs) [ 553.031307] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 553.031307] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] raise e [ 553.031307] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 553.031307] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] nwinfo = self.network_api.allocate_for_instance( [ 553.031307] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 553.031307] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] created_port_ids = self._update_ports_for_instance( [ 553.031307] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 553.031307] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] with excutils.save_and_reraise_exception(): [ 553.031307] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 553.031622] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] self.force_reraise() [ 553.031622] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 553.031622] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] raise self.value [ 553.031622] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 553.031622] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] updated_port = self._update_port( [ 553.031622] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 553.031622] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] _ensure_no_port_binding_failure(port) [ 553.031622] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 553.031622] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] raise exception.PortBindingFailed(port_id=port['id']) [ 553.031622] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] nova.exception.PortBindingFailed: Binding failed for port 5bb1633d-e41c-4ced-ab9f-e6019618b6f1, please check neutron logs for more information. [ 553.031622] env[61629]: ERROR nova.compute.manager [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] [ 553.031884] env[61629]: DEBUG nova.compute.utils [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Binding failed for port 5bb1633d-e41c-4ced-ab9f-e6019618b6f1, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 553.031884] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.686s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.033176] env[61629]: INFO nova.compute.claims [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 553.046748] env[61629]: DEBUG nova.compute.manager [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Build of instance 8a8a3aa7-ec40-4a8a-a823-718025428a59 was re-scheduled: Binding failed for port 5bb1633d-e41c-4ced-ab9f-e6019618b6f1, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 553.046748] env[61629]: DEBUG nova.compute.manager [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 553.046748] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Acquiring lock "refresh_cache-8a8a3aa7-ec40-4a8a-a823-718025428a59" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.046748] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Acquired lock "refresh_cache-8a8a3aa7-ec40-4a8a-a823-718025428a59" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.047051] env[61629]: DEBUG nova.network.neutron [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 553.118929] env[61629]: DEBUG nova.compute.manager [req-45178a8a-8a4a-420b-8b54-d88f57ab111f req-d427d4e8-ac17-44f2-8d27-19b15db13642 service nova] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Received event network-changed-9f109052-c649-4c70-b87d-33df40b955c4 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 553.118929] env[61629]: DEBUG nova.compute.manager [req-45178a8a-8a4a-420b-8b54-d88f57ab111f req-d427d4e8-ac17-44f2-8d27-19b15db13642 service nova] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Refreshing instance network info cache due to event network-changed-9f109052-c649-4c70-b87d-33df40b955c4. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 553.118929] env[61629]: DEBUG oslo_concurrency.lockutils [req-45178a8a-8a4a-420b-8b54-d88f57ab111f req-d427d4e8-ac17-44f2-8d27-19b15db13642 service nova] Acquiring lock "refresh_cache-f128e0a7-f67b-4800-bfd6-ec65c5042460" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.125069] env[61629]: DEBUG nova.compute.manager [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 553.164058] env[61629]: DEBUG nova.virt.hardware [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 553.164375] env[61629]: DEBUG nova.virt.hardware [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 553.164464] env[61629]: DEBUG nova.virt.hardware [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 553.164636] env[61629]: DEBUG nova.virt.hardware [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 553.164774] env[61629]: DEBUG nova.virt.hardware [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 553.164914] env[61629]: DEBUG nova.virt.hardware [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 553.166167] env[61629]: DEBUG nova.virt.hardware [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 553.166167] env[61629]: DEBUG nova.virt.hardware [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 553.166167] env[61629]: DEBUG nova.virt.hardware [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 
tempest-ServerShowV254Test-809676224-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 553.166403] env[61629]: DEBUG nova.virt.hardware [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 553.166618] env[61629]: DEBUG nova.virt.hardware [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 553.167523] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8afa1d04-c23b-4d21-ac97-1d0ece9abfc3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.179581] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ca37c00-1198-4b0c-8ba4-59ac982972b2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.191155] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Instance VIF info [] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 553.197391] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Creating folder: Project (9cbd2a219a834a3980b6438f0756be73). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 553.197716] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e0ce9d21-5902-498d-a446-152914d30b31 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.209818] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Created folder: Project (9cbd2a219a834a3980b6438f0756be73) in parent group-v288443. [ 553.209818] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Creating folder: Instances. Parent ref: group-v288447. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 553.209818] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3d9e83a0-dcad-4f8f-b29e-3e2816e4617c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.219046] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Created folder: Instances in parent group-v288447. 
[ 553.219046] env[61629]: DEBUG oslo.service.loopingcall [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 553.219365] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 553.219365] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-00d31578-9cdc-437e-bb53-79e6a108a6af {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.240879] env[61629]: DEBUG oslo_concurrency.lockutils [req-71001404-3c40-4426-96fe-7922c564bb24 req-c252b469-b150-4d59-a968-6b301168d7e3 service nova] Releasing lock "refresh_cache-bfbff392-0dc0-47c7-ae58-22d922638ac8" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.241195] env[61629]: DEBUG nova.compute.manager [req-71001404-3c40-4426-96fe-7922c564bb24 req-c252b469-b150-4d59-a968-6b301168d7e3 service nova] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Received event network-vif-deleted-55b2c3cb-0b0e-4df8-a6df-314f7cf77caa {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 553.241454] env[61629]: DEBUG nova.compute.manager [req-71001404-3c40-4426-96fe-7922c564bb24 req-c252b469-b150-4d59-a968-6b301168d7e3 service nova] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Received event network-changed-1ad574ca-30cb-485b-b57a-83736bdfbe6d {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 553.241681] env[61629]: DEBUG nova.compute.manager [req-71001404-3c40-4426-96fe-7922c564bb24 req-c252b469-b150-4d59-a968-6b301168d7e3 service nova] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Refreshing instance network info cache due to event network-changed-1ad574ca-30cb-485b-b57a-83736bdfbe6d. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 553.241891] env[61629]: DEBUG oslo_concurrency.lockutils [req-71001404-3c40-4426-96fe-7922c564bb24 req-c252b469-b150-4d59-a968-6b301168d7e3 service nova] Acquiring lock "refresh_cache-d29660cc-47f8-4ca5-b21f-bcfd945abc03" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.242040] env[61629]: DEBUG oslo_concurrency.lockutils [req-71001404-3c40-4426-96fe-7922c564bb24 req-c252b469-b150-4d59-a968-6b301168d7e3 service nova] Acquired lock "refresh_cache-d29660cc-47f8-4ca5-b21f-bcfd945abc03" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.242834] env[61629]: DEBUG nova.network.neutron [req-71001404-3c40-4426-96fe-7922c564bb24 req-c252b469-b150-4d59-a968-6b301168d7e3 service nova] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Refreshing network info cache for port 1ad574ca-30cb-485b-b57a-83736bdfbe6d {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 553.249694] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 553.249694] env[61629]: value = "task-1353952" [ 553.249694] env[61629]: _type = "Task" [ 553.249694] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.263133] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Releasing lock "refresh_cache-1a756eed-d5f2-4135-b522-ed06e20da1bc" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.263568] env[61629]: DEBUG nova.compute.manager [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 553.263752] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 553.263985] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1353952, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.265809] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1bbee9ec-f0e4-4fa8-b8a0-ba682ce70cbe {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.268354] env[61629]: DEBUG oslo_concurrency.lockutils [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Releasing lock "refresh_cache-f128e0a7-f67b-4800-bfd6-ec65c5042460" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.268663] env[61629]: DEBUG nova.compute.manager [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 553.268852] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 553.269183] env[61629]: DEBUG oslo_concurrency.lockutils [req-45178a8a-8a4a-420b-8b54-d88f57ab111f req-d427d4e8-ac17-44f2-8d27-19b15db13642 service nova] Acquired lock "refresh_cache-f128e0a7-f67b-4800-bfd6-ec65c5042460" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.271026] env[61629]: DEBUG nova.network.neutron [req-45178a8a-8a4a-420b-8b54-d88f57ab111f req-d427d4e8-ac17-44f2-8d27-19b15db13642 service nova] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Refreshing network info cache for port 9f109052-c649-4c70-b87d-33df40b955c4 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 553.271026] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-418e2174-7850-4ed2-ad3f-08a0c14218bd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.284682] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-def6ebfc-7385-4f60-a71d-52064af3b3c4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.298428] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f662280b-a803-40f5-90a1-b8898837537a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.323404] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f128e0a7-f67b-4800-bfd6-ec65c5042460 could not be found. [ 553.323757] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 553.323961] env[61629]: INFO nova.compute.manager [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Took 0.06 seconds to destroy the instance on the hypervisor. [ 553.324238] env[61629]: DEBUG oslo.service.loopingcall [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 553.329639] env[61629]: DEBUG nova.compute.manager [-] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 553.329744] env[61629]: DEBUG nova.network.neutron [-] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 553.332357] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1a756eed-d5f2-4135-b522-ed06e20da1bc could not be found. [ 553.332562] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 553.332731] env[61629]: INFO nova.compute.manager [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Took 0.07 seconds to destroy the instance on the hypervisor. [ 553.332956] env[61629]: DEBUG oslo.service.loopingcall [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 553.333519] env[61629]: DEBUG nova.compute.manager [-] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 553.333723] env[61629]: DEBUG nova.network.neutron [-] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 553.354515] env[61629]: DEBUG nova.network.neutron [-] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 553.364150] env[61629]: DEBUG nova.network.neutron [-] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 553.428272] env[61629]: DEBUG oslo_concurrency.lockutils [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Acquiring lock "113fe8e6-bc12-41fe-a405-cec2aa1a717e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.428802] env[61629]: DEBUG oslo_concurrency.lockutils [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Lock "113fe8e6-bc12-41fe-a405-cec2aa1a717e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.428802] env[61629]: DEBUG oslo_concurrency.lockutils [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Acquiring lock "113fe8e6-bc12-41fe-a405-cec2aa1a717e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.429062] env[61629]: DEBUG oslo_concurrency.lockutils [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Lock "113fe8e6-bc12-41fe-a405-cec2aa1a717e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.429276] env[61629]: DEBUG oslo_concurrency.lockutils [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Lock "113fe8e6-bc12-41fe-a405-cec2aa1a717e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.432633] env[61629]: INFO nova.compute.manager [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Terminating instance [ 553.438562] env[61629]: DEBUG oslo_concurrency.lockutils [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Acquiring lock "refresh_cache-113fe8e6-bc12-41fe-a405-cec2aa1a717e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.438769] env[61629]: DEBUG oslo_concurrency.lockutils [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Acquired lock "refresh_cache-113fe8e6-bc12-41fe-a405-cec2aa1a717e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.438970] env[61629]: DEBUG nova.network.neutron [None 
req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 553.588603] env[61629]: DEBUG nova.network.neutron [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 553.753516] env[61629]: DEBUG nova.network.neutron [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.766667] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1353952, 'name': CreateVM_Task} progress is 15%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.802148] env[61629]: DEBUG nova.network.neutron [req-71001404-3c40-4426-96fe-7922c564bb24 req-c252b469-b150-4d59-a968-6b301168d7e3 service nova] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 553.813263] env[61629]: DEBUG nova.network.neutron [req-45178a8a-8a4a-420b-8b54-d88f57ab111f req-d427d4e8-ac17-44f2-8d27-19b15db13642 service nova] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 553.859901] env[61629]: DEBUG nova.network.neutron [-] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.869434] env[61629]: DEBUG nova.network.neutron [-] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.968109] env[61629]: DEBUG nova.network.neutron [req-45178a8a-8a4a-420b-8b54-d88f57ab111f req-d427d4e8-ac17-44f2-8d27-19b15db13642 service nova] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.180876] env[61629]: DEBUG nova.network.neutron [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 554.265313] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Releasing lock "refresh_cache-8a8a3aa7-ec40-4a8a-a823-718025428a59" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.265730] env[61629]: DEBUG nova.compute.manager [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 554.266068] env[61629]: DEBUG nova.compute.manager [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 554.266366] env[61629]: DEBUG nova.network.neutron [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 554.269837] env[61629]: DEBUG nova.network.neutron [req-71001404-3c40-4426-96fe-7922c564bb24 req-c252b469-b150-4d59-a968-6b301168d7e3 service nova] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.280861] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1353952, 'name': CreateVM_Task} progress is 25%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.293062] env[61629]: DEBUG nova.network.neutron [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 554.367205] env[61629]: INFO nova.compute.manager [-] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Took 1.04 seconds to deallocate network for instance. 
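Editor's note: the repeated "Acquiring lock ... / acquired ... waited / released ... held" entries in this block come from oslo.concurrency's lockutils, which serialises access to shared state such as the per-instance network-info cache and the resource tracker's "compute_resources" lock. The following is a minimal sketch of that pattern using only the public lockutils.synchronized decorator and lockutils.lock context manager; the lock names and functions are placeholders, not nova code.

# Minimal sketch of the lockutils usage behind the DEBUG lines above.
from oslo_concurrency import lockutils

@lockutils.synchronized("refresh_cache-<instance-uuid>")
def refresh_network_cache():
    # Only one holder at a time; lockutils logs the acquire/release events
    # together with the wait and hold durations seen in the log.
    pass

def abort_instance_claim():
    # The same pattern as a context manager, mirroring the "compute_resources" lock.
    with lockutils.lock("compute_resources"):
        pass

if __name__ == "__main__":
    refresh_network_cache()
    abort_instance_claim()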
[ 554.372372] env[61629]: DEBUG nova.compute.claims [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 554.372372] env[61629]: DEBUG oslo_concurrency.lockutils [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.376159] env[61629]: INFO nova.compute.manager [-] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Took 1.04 seconds to deallocate network for instance. [ 554.380473] env[61629]: DEBUG nova.compute.claims [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 554.380643] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.384252] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13ce3e9-2fc2-4da7-a917-bcefff4d79ad {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.394117] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e477db1-7357-444c-9a6e-ec93746b1700 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.427401] env[61629]: DEBUG nova.network.neutron [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.428979] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc328735-bde6-4172-b835-225070717bdd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.442329] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95701447-93ea-414c-b969-0f7d7e9c61e2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.457830] env[61629]: DEBUG nova.compute.provider_tree [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 554.471729] env[61629]: DEBUG 
oslo_concurrency.lockutils [req-45178a8a-8a4a-420b-8b54-d88f57ab111f req-d427d4e8-ac17-44f2-8d27-19b15db13642 service nova] Releasing lock "refresh_cache-f128e0a7-f67b-4800-bfd6-ec65c5042460" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.472521] env[61629]: DEBUG nova.compute.manager [req-45178a8a-8a4a-420b-8b54-d88f57ab111f req-d427d4e8-ac17-44f2-8d27-19b15db13642 service nova] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Received event network-vif-deleted-9f109052-c649-4c70-b87d-33df40b955c4 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 554.772451] env[61629]: DEBUG oslo_concurrency.lockutils [req-71001404-3c40-4426-96fe-7922c564bb24 req-c252b469-b150-4d59-a968-6b301168d7e3 service nova] Releasing lock "refresh_cache-d29660cc-47f8-4ca5-b21f-bcfd945abc03" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.772868] env[61629]: DEBUG nova.compute.manager [req-71001404-3c40-4426-96fe-7922c564bb24 req-c252b469-b150-4d59-a968-6b301168d7e3 service nova] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Received event network-vif-deleted-1ad574ca-30cb-485b-b57a-83736bdfbe6d {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 554.773307] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1353952, 'name': CreateVM_Task, 'duration_secs': 1.147373} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.773603] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 554.774613] env[61629]: DEBUG oslo_vmware.service [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a27c123-26b7-4427-bbfb-5660fc8bdc2b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.785138] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.785463] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.785953] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 554.786279] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-2d92d91f-6219-4601-a308-dfddb865e7e9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.793776] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 554.793776] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5281aacc-7a61-0965-d68e-ab17ebba85a7" [ 554.793776] env[61629]: _type = "Task" [ 554.793776] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.797935] env[61629]: DEBUG nova.network.neutron [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.803808] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5281aacc-7a61-0965-d68e-ab17ebba85a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.933014] env[61629]: DEBUG oslo_concurrency.lockutils [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Releasing lock "refresh_cache-113fe8e6-bc12-41fe-a405-cec2aa1a717e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.933359] env[61629]: DEBUG nova.compute.manager [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 554.933556] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 554.935038] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b87ff0a-76f4-4326-8d51-99afe4c3e210 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.943011] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 554.943356] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6206c02b-51d7-4a4c-a55f-454cd84ee930 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.950782] env[61629]: DEBUG oslo_vmware.api [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Waiting for the task: (returnval){ [ 554.950782] env[61629]: value = "task-1353953" [ 554.950782] env[61629]: _type = "Task" [ 554.950782] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.959258] env[61629]: DEBUG oslo_vmware.api [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353953, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.964795] env[61629]: DEBUG nova.scheduler.client.report [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 555.128068] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Acquiring lock "b0343f07-0539-4395-81c8-46ca1f2a8920" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.128323] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Lock "b0343f07-0539-4395-81c8-46ca1f2a8920" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.170408] env[61629]: DEBUG nova.compute.manager [req-74fab9af-a8df-4820-a8e5-299007ccf794 req-7eff8a7a-46c0-41b0-9624-b0902039da89 service nova] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Received event network-changed-f4597226-4835-4be9-97ae-427377d35bcf {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 555.170598] env[61629]: DEBUG nova.compute.manager [req-74fab9af-a8df-4820-a8e5-299007ccf794 req-7eff8a7a-46c0-41b0-9624-b0902039da89 service nova] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Refreshing instance network info cache due to event network-changed-f4597226-4835-4be9-97ae-427377d35bcf. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 555.171888] env[61629]: DEBUG oslo_concurrency.lockutils [req-74fab9af-a8df-4820-a8e5-299007ccf794 req-7eff8a7a-46c0-41b0-9624-b0902039da89 service nova] Acquiring lock "refresh_cache-1a756eed-d5f2-4135-b522-ed06e20da1bc" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.171888] env[61629]: DEBUG oslo_concurrency.lockutils [req-74fab9af-a8df-4820-a8e5-299007ccf794 req-7eff8a7a-46c0-41b0-9624-b0902039da89 service nova] Acquired lock "refresh_cache-1a756eed-d5f2-4135-b522-ed06e20da1bc" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.171888] env[61629]: DEBUG nova.network.neutron [req-74fab9af-a8df-4820-a8e5-299007ccf794 req-7eff8a7a-46c0-41b0-9624-b0902039da89 service nova] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Refreshing network info cache for port f4597226-4835-4be9-97ae-427377d35bcf {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 555.302587] env[61629]: INFO nova.compute.manager [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] [instance: 8a8a3aa7-ec40-4a8a-a823-718025428a59] Took 1.04 seconds to deallocate network for instance. [ 555.309903] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.310097] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 555.310333] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.310473] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.310646] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 555.310890] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-b0360c60-4ae1-40c2-9920-4cdc76b2d611 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.328394] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 555.328640] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 555.330061] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a65a23-7890-4d9e-8e51-3c0df9317424 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.347457] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0ae7d49-5bf3-49cc-88a8-e0424fedbfc5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.354583] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 555.354583] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52c26e5b-a017-6088-be74-ddac306f6059" [ 555.354583] env[61629]: _type = "Task" [ 555.354583] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.367675] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52c26e5b-a017-6088-be74-ddac306f6059, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.461952] env[61629]: DEBUG oslo_vmware.api [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353953, 'name': PowerOffVM_Task, 'duration_secs': 0.223764} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.461952] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 555.461952] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 555.462155] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a935d399-cbf7-458e-9390-7b85b5d3c6b3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.468685] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.437s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 555.469308] env[61629]: DEBUG nova.compute.manager [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 555.472468] env[61629]: DEBUG oslo_concurrency.lockutils [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.035s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.489738] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 555.489932] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 555.490166] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Deleting the datastore file [datastore2] 113fe8e6-bc12-41fe-a405-cec2aa1a717e {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 555.490396] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-64bee923-afce-4781-b820-06df1d841c5a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.499677] env[61629]: DEBUG oslo_vmware.api [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Waiting for the task: (returnval){ [ 555.499677] env[61629]: value = "task-1353955" [ 555.499677] env[61629]: _type = "Task" [ 555.499677] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.511936] env[61629]: DEBUG oslo_vmware.api [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353955, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.712132] env[61629]: DEBUG nova.network.neutron [req-74fab9af-a8df-4820-a8e5-299007ccf794 req-7eff8a7a-46c0-41b0-9624-b0902039da89 service nova] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 555.874680] env[61629]: DEBUG nova.network.neutron [req-74fab9af-a8df-4820-a8e5-299007ccf794 req-7eff8a7a-46c0-41b0-9624-b0902039da89 service nova] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.874680] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Preparing fetch location {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 555.874680] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Creating directory with path [datastore1] vmware_temp/f7cfb503-c4f6-4cd3-8fa1-3a6cdeffa050/7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 555.874680] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4cf2eda-e102-407a-aa3a-70ae6382707a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.889461] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Created directory with path [datastore1] vmware_temp/f7cfb503-c4f6-4cd3-8fa1-3a6cdeffa050/7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 555.889461] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Fetch image to [datastore1] vmware_temp/f7cfb503-c4f6-4cd3-8fa1-3a6cdeffa050/7f036972-f3d8-47df-ae86-f8f2844bf80c/tmp-sparse.vmdk {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 555.889461] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Downloading image file data 7f036972-f3d8-47df-ae86-f8f2844bf80c to [datastore1] vmware_temp/f7cfb503-c4f6-4cd3-8fa1-3a6cdeffa050/7f036972-f3d8-47df-ae86-f8f2844bf80c/tmp-sparse.vmdk on the data store datastore1 {{(pid=61629) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 555.889461] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-017c77f8-47be-4e83-a14f-9c065645c53c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.907016] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2f8faf-a704-4b78-8c1f-099efe391bd1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.918639] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-11aeb543-c495-4e61-9182-f970ac88c8e0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.960941] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a153473b-0ab2-4b45-9d50-f6919b277893 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.967628] env[61629]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-144b4fe2-9c28-4c60-8985-2d2cefc4fa1a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.974158] env[61629]: DEBUG nova.compute.utils [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 555.976279] env[61629]: DEBUG nova.compute.manager [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Not allocating networking since 'none' was specified. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 556.004962] env[61629]: DEBUG nova.virt.vmwareapi.images [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Downloading image file data 7f036972-f3d8-47df-ae86-f8f2844bf80c to the data store datastore1 {{(pid=61629) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 556.022296] env[61629]: DEBUG oslo_vmware.api [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Task: {'id': task-1353955, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102236} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.022747] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 556.023245] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 556.023552] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 556.023833] env[61629]: INFO nova.compute.manager [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Took 1.09 seconds to destroy the instance on the hypervisor. [ 556.024191] env[61629]: DEBUG oslo.service.loopingcall [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 556.025111] env[61629]: DEBUG nova.compute.manager [-] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 556.025111] env[61629]: DEBUG nova.network.neutron [-] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 556.067790] env[61629]: DEBUG nova.network.neutron [-] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 556.078326] env[61629]: DEBUG oslo_vmware.rw_handles [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f7cfb503-c4f6-4cd3-8fa1-3a6cdeffa050/7f036972-f3d8-47df-ae86-f8f2844bf80c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61629) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 556.280879] env[61629]: DEBUG oslo_concurrency.lockutils [None req-19581f8d-c03d-47c8-94e6-8dc9baae036d tempest-ServersListShow296Test-497296791 tempest-ServersListShow296Test-497296791-project-member] Acquiring lock "643343ed-35c6-44e4-9852-55750f046fa1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.281148] env[61629]: DEBUG oslo_concurrency.lockutils [None req-19581f8d-c03d-47c8-94e6-8dc9baae036d tempest-ServersListShow296Test-497296791 tempest-ServersListShow296Test-497296791-project-member] Lock "643343ed-35c6-44e4-9852-55750f046fa1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.341826] env[61629]: INFO nova.scheduler.client.report [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Deleted allocations for instance 8a8a3aa7-ec40-4a8a-a823-718025428a59 [ 556.374858] env[61629]: DEBUG oslo_concurrency.lockutils [req-74fab9af-a8df-4820-a8e5-299007ccf794 req-7eff8a7a-46c0-41b0-9624-b0902039da89 service nova] Releasing lock "refresh_cache-1a756eed-d5f2-4135-b522-ed06e20da1bc" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.375230] env[61629]: DEBUG nova.compute.manager [req-74fab9af-a8df-4820-a8e5-299007ccf794 req-7eff8a7a-46c0-41b0-9624-b0902039da89 service nova] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Received event network-vif-deleted-f4597226-4835-4be9-97ae-427377d35bcf {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 556.422150] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b531198-7131-4e8b-a0ff-d5d97b8f5fef {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.435491] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63c0a5b-7b60-43a6-97af-a12338290359 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.479589] env[61629]: DEBUG nova.compute.manager [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 556.482943] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-547ab26e-4089-4334-927b-019efa739202 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.494835] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c91e654-c5dc-41a7-9160-8cabbc9d3e56 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.512175] env[61629]: DEBUG nova.compute.provider_tree [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 556.570176] env[61629]: DEBUG nova.network.neutron [-] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 556.701576] env[61629]: DEBUG oslo_vmware.rw_handles [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Completed reading data from the image iterator. {{(pid=61629) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 556.701696] env[61629]: DEBUG oslo_vmware.rw_handles [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f7cfb503-c4f6-4cd3-8fa1-3a6cdeffa050/7f036972-f3d8-47df-ae86-f8f2844bf80c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61629) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 556.854961] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da3a249c-9b0f-488c-830c-efc91b6b2788 tempest-ImagesOneServerNegativeTestJSON-1851830790 tempest-ImagesOneServerNegativeTestJSON-1851830790-project-member] Lock "8a8a3aa7-ec40-4a8a-a823-718025428a59" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.976s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.015787] env[61629]: DEBUG nova.scheduler.client.report [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 557.076539] env[61629]: INFO nova.compute.manager [-] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Took 1.05 seconds to deallocate network for instance. 
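Editor's note: the "Waiting for function ... to return" and "Task: {...} progress is N%" entries reflect a fixed-interval polling loop around long-running vCenter tasks. Below is a hedged sketch of that pattern built on oslo.service's FixedIntervalLoopingCall; fetch_task_progress is a hypothetical stand-in for the vCenter property read that oslo.vmware performs, and the code is illustrative rather than the driver's actual wait_for_task implementation.

# Hedged sketch: poll a task at a fixed interval until it succeeds or fails.
from oslo_service import loopingcall

def wait_for_task(fetch_task_progress, interval=0.5):
    """Poll a long-running task until it finishes and return its result."""

    def _poll():
        state, _progress, result = fetch_task_progress()
        if state == "success":
            # LoopingCallDone stops the loop; .wait() below returns its value.
            raise loopingcall.LoopingCallDone(result)
        if state == "error":
            raise RuntimeError("task failed")
        # Any other state: keep polling on the next interval.

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=interval).wait()

if __name__ == "__main__":
    # Toy driver: the "task" reports running twice, then succeeds.
    states = iter([("running", 0, None), ("running", 25, None), ("success", 100, "vm-123")])
    print(wait_for_task(lambda: next(states), interval=0.01))  # vm-123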
[ 557.169041] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Acquiring lock "b5625b76-37e3-49be-bd3b-8b864021dbd1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.169041] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Lock "b5625b76-37e3-49be-bd3b-8b864021dbd1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.358127] env[61629]: DEBUG nova.compute.manager [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 557.496458] env[61629]: DEBUG nova.compute.manager [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 557.524100] env[61629]: DEBUG oslo_concurrency.lockutils [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.051s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.525428] env[61629]: ERROR nova.compute.manager [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4cedabf3-2049-4c0a-a3b3-f3fc62767ed6, please check neutron logs for more information. 
[ 557.525428] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Traceback (most recent call last): [ 557.525428] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 557.525428] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] self.driver.spawn(context, instance, image_meta, [ 557.525428] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 557.525428] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 557.525428] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 557.525428] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] vm_ref = self.build_virtual_machine(instance, [ 557.525428] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 557.525428] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] vif_infos = vmwarevif.get_vif_info(self._session, [ 557.525428] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 557.525786] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] for vif in network_info: [ 557.525786] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 557.525786] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] return self._sync_wrapper(fn, *args, **kwargs) [ 557.525786] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 557.525786] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] self.wait() [ 557.525786] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 557.525786] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] self[:] = self._gt.wait() [ 557.525786] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 557.525786] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] return self._exit_event.wait() [ 557.525786] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 557.525786] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] result = hub.switch() [ 557.525786] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
557.525786] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] return self.greenlet.switch() [ 557.526120] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 557.526120] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] result = function(*args, **kwargs) [ 557.526120] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 557.526120] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] return func(*args, **kwargs) [ 557.526120] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 557.526120] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] raise e [ 557.526120] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 557.526120] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] nwinfo = self.network_api.allocate_for_instance( [ 557.526120] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 557.526120] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] created_port_ids = self._update_ports_for_instance( [ 557.526120] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 557.526120] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] with excutils.save_and_reraise_exception(): [ 557.526120] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 557.526437] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] self.force_reraise() [ 557.526437] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 557.526437] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] raise self.value [ 557.526437] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 557.526437] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] updated_port = self._update_port( [ 557.526437] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 557.526437] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] _ensure_no_port_binding_failure(port) [ 557.526437] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 557.526437] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] raise exception.PortBindingFailed(port_id=port['id']) [ 557.526437] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] nova.exception.PortBindingFailed: Binding failed for port 4cedabf3-2049-4c0a-a3b3-f3fc62767ed6, please check neutron logs for more information. [ 557.526437] env[61629]: ERROR nova.compute.manager [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] [ 557.526703] env[61629]: DEBUG nova.compute.utils [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Binding failed for port 4cedabf3-2049-4c0a-a3b3-f3fc62767ed6, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 557.532921] env[61629]: DEBUG nova.virt.hardware [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 557.533376] env[61629]: DEBUG nova.virt.hardware [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 557.533376] env[61629]: DEBUG nova.virt.hardware [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 557.533573] env[61629]: DEBUG nova.virt.hardware [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 557.533608] env[61629]: DEBUG nova.virt.hardware [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 557.533980] env[61629]: DEBUG nova.virt.hardware [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 557.533980] env[61629]: DEBUG nova.virt.hardware [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 557.534109] env[61629]: DEBUG nova.virt.hardware [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 557.534611] env[61629]: DEBUG nova.virt.hardware [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 557.534816] env[61629]: DEBUG nova.virt.hardware [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 557.534993] env[61629]: DEBUG nova.virt.hardware [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 557.535603] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 15.355s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.535603] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.535603] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61629) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 557.535855] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.761s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.545040] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a5222b1-e559-4bf7-be30-6db15a89db99 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.549857] env[61629]: 
DEBUG nova.compute.manager [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Build of instance 20e445dd-663c-46e4-bc0a-f00e68ecd6cd was re-scheduled: Binding failed for port 4cedabf3-2049-4c0a-a3b3-f3fc62767ed6, please check neutron logs for more information. {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 557.550300] env[61629]: DEBUG nova.compute.manager [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 557.550511] env[61629]: DEBUG oslo_concurrency.lockutils [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Acquiring lock "refresh_cache-20e445dd-663c-46e4-bc0a-f00e68ecd6cd" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.553859] env[61629]: DEBUG oslo_concurrency.lockutils [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Acquired lock "refresh_cache-20e445dd-663c-46e4-bc0a-f00e68ecd6cd" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.553859] env[61629]: DEBUG nova.network.neutron [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 557.558023] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a35eb1-2764-4b39-a0d6-040a88a0b2ed {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.567940] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-713b38e2-efe5-442a-81dd-2b39d304ce9c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.574297] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7d6fe5-67f1-4813-ac2b-03f57bc45327 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.598303] env[61629]: DEBUG oslo_concurrency.lockutils [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.599871] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9226c0ad-6bc4-4f15-be00-468075198f79 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 557.603185] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Instance VIF info [] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 557.609335] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Creating folder: Project (16c634fe725d42528c21f7d4da71c314). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 557.609828] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e30328a-bf6b-4879-ba4b-dd5ae86120fc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.616333] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6bf01e-998e-4d07-9d27-d6ed3e34f052 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.622948] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Created folder: Project (16c634fe725d42528c21f7d4da71c314) in parent group-v288443. [ 557.623041] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Creating folder: Instances. Parent ref: group-v288450. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 557.624098] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9425b6f2-e72a-483d-9f99-d813c33db91e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.654837] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181528MB free_disk=151GB free_vcpus=48 pci_devices=None {{(pid=61629) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 557.655060] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.662241] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Created folder: Instances in parent group-v288450. [ 557.662241] env[61629]: DEBUG oslo.service.loopingcall [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 557.662241] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 557.662467] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6544444c-e49b-4e97-9394-79e8365f1e6b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.679789] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 557.679789] env[61629]: value = "task-1353958" [ 557.679789] env[61629]: _type = "Task" [ 557.679789] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.688165] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1353958, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.890102] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.094647] env[61629]: DEBUG nova.network.neutron [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 558.192580] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1353958, 'name': CreateVM_Task, 'duration_secs': 0.42521} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.198210] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 558.199803] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.199803] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.199803] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 558.199803] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bda5fa27-da4d-4f1e-b5bf-7810a5e6ae20 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.205718] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for the task: (returnval){ [ 558.205718] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5254bf02-3bd7-1b52-fd92-f0c70edf5181" [ 558.205718] env[61629]: _type = "Task" [ 558.205718] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.214078] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5254bf02-3bd7-1b52-fd92-f0c70edf5181, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.244038] env[61629]: DEBUG nova.network.neutron [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.430452] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c42812-e2b7-45de-adc8-9a7aedbd8936 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.441355] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7118f17-adb2-4305-bbc2-8bcfa186530e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.479207] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a3a30d7-507c-4efd-8ce6-2f3965cc1f69 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.487285] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb9ac80b-a9fe-4583-b2db-30dfbf53e856 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.502579] env[61629]: DEBUG nova.compute.provider_tree [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 558.718616] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.718616] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 558.718803] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.749733] env[61629]: DEBUG oslo_concurrency.lockutils [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Releasing lock "refresh_cache-20e445dd-663c-46e4-bc0a-f00e68ecd6cd" {{(pid=61629) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.749733] env[61629]: DEBUG nova.compute.manager [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 558.749733] env[61629]: DEBUG nova.compute.manager [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 558.749733] env[61629]: DEBUG nova.network.neutron [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 558.774387] env[61629]: DEBUG nova.network.neutron [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 558.864263] env[61629]: DEBUG nova.virt.vmwareapi.images [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Downloaded image file data 7f036972-f3d8-47df-ae86-f8f2844bf80c to vmware_temp/f7cfb503-c4f6-4cd3-8fa1-3a6cdeffa050/7f036972-f3d8-47df-ae86-f8f2844bf80c/tmp-sparse.vmdk on the data store datastore1 {{(pid=61629) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 558.869545] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Caching image {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 558.869545] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Copying Virtual Disk [datastore1] vmware_temp/f7cfb503-c4f6-4cd3-8fa1-3a6cdeffa050/7f036972-f3d8-47df-ae86-f8f2844bf80c/tmp-sparse.vmdk to [datastore1] vmware_temp/f7cfb503-c4f6-4cd3-8fa1-3a6cdeffa050/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 558.869545] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5e052f3e-1e35-4ba2-a703-e7231f6f7da5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.877747] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ 
[ 558.877747] env[61629]: value = "task-1353959" [ 558.877747] env[61629]: _type = "Task" [ 558.877747] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.886171] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353959, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.005976] env[61629]: DEBUG nova.scheduler.client.report [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 559.276917] env[61629]: DEBUG nova.network.neutron [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.395167] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353959, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.513606] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.977s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 559.514239] env[61629]: ERROR nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9f30835a-0bc5-4945-8f94-dcc9db484a75, please check neutron logs for more information. 
[ 559.514239] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Traceback (most recent call last): [ 559.514239] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 559.514239] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] self.driver.spawn(context, instance, image_meta, [ 559.514239] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 559.514239] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] self._vmops.spawn(context, instance, image_meta, injected_files, [ 559.514239] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 559.514239] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] vm_ref = self.build_virtual_machine(instance, [ 559.514239] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 559.514239] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] vif_infos = vmwarevif.get_vif_info(self._session, [ 559.514239] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 559.514663] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] for vif in network_info: [ 559.514663] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 559.514663] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] return self._sync_wrapper(fn, *args, **kwargs) [ 559.514663] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 559.514663] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] self.wait() [ 559.514663] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 559.514663] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] self[:] = self._gt.wait() [ 559.514663] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 559.514663] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] return self._exit_event.wait() [ 559.514663] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 559.514663] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] result = hub.switch() [ 559.514663] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
559.514663] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] return self.greenlet.switch() [ 559.515084] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 559.515084] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] result = function(*args, **kwargs) [ 559.515084] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 559.515084] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] return func(*args, **kwargs) [ 559.515084] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 559.515084] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] raise e [ 559.515084] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 559.515084] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] nwinfo = self.network_api.allocate_for_instance( [ 559.515084] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 559.515084] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] created_port_ids = self._update_ports_for_instance( [ 559.515084] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 559.515084] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] with excutils.save_and_reraise_exception(): [ 559.515084] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 559.515693] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] self.force_reraise() [ 559.515693] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 559.515693] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] raise self.value [ 559.515693] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 559.515693] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] updated_port = self._update_port( [ 559.515693] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 559.515693] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] _ensure_no_port_binding_failure(port) [ 559.515693] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 559.515693] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] raise exception.PortBindingFailed(port_id=port['id']) [ 559.515693] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] nova.exception.PortBindingFailed: Binding failed for port 9f30835a-0bc5-4945-8f94-dcc9db484a75, please check neutron logs for more information. [ 559.515693] env[61629]: ERROR nova.compute.manager [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] [ 559.516011] env[61629]: DEBUG nova.compute.utils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Binding failed for port 9f30835a-0bc5-4945-8f94-dcc9db484a75, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 559.516483] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.590s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.519731] env[61629]: DEBUG nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Build of instance abb87186-9951-4fbe-98b2-b595dd4fea12 was re-scheduled: Binding failed for port 9f30835a-0bc5-4945-8f94-dcc9db484a75, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 559.520270] env[61629]: DEBUG nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 559.520533] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquiring lock "refresh_cache-abb87186-9951-4fbe-98b2-b595dd4fea12" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.520743] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquired lock "refresh_cache-abb87186-9951-4fbe-98b2-b595dd4fea12" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 559.520898] env[61629]: DEBUG nova.network.neutron [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 559.651261] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Acquiring lock "d43d47a2-a27b-4bb8-9421-61805064a3d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.651516] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Lock "d43d47a2-a27b-4bb8-9421-61805064a3d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.781398] env[61629]: INFO nova.compute.manager [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] [instance: 20e445dd-663c-46e4-bc0a-f00e68ecd6cd] Took 1.03 seconds to deallocate network for instance. [ 559.895120] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353959, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.765927} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.895120] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Copied Virtual Disk [datastore1] vmware_temp/f7cfb503-c4f6-4cd3-8fa1-3a6cdeffa050/7f036972-f3d8-47df-ae86-f8f2844bf80c/tmp-sparse.vmdk to [datastore1] vmware_temp/f7cfb503-c4f6-4cd3-8fa1-3a6cdeffa050/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 559.895120] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Deleting the datastore file [datastore1] vmware_temp/f7cfb503-c4f6-4cd3-8fa1-3a6cdeffa050/7f036972-f3d8-47df-ae86-f8f2844bf80c/tmp-sparse.vmdk {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 559.895120] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e6da3f9-e121-45c9-87df-339dadfe5a3e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.899913] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 559.899913] env[61629]: value = "task-1353960" [ 559.899913] env[61629]: _type = "Task" [ 559.899913] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.909828] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353960, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.051606] env[61629]: DEBUG nova.network.neutron [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 560.178320] env[61629]: DEBUG nova.network.neutron [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 560.356961] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38748240-c583-4e76-9adc-e9fb3cbff81e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.366628] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0f7a5a1-9a5a-482c-a23f-f5287fa35a15 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.407651] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66dda96f-4c46-4857-82ba-2186d6c80ada {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.417820] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d686240d-bdeb-46ff-903a-c1e6c4de5c76 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.421704] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353960, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025095} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.421986] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 560.422330] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Moving file from [datastore1] vmware_temp/f7cfb503-c4f6-4cd3-8fa1-3a6cdeffa050/7f036972-f3d8-47df-ae86-f8f2844bf80c to [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c. 
{{(pid=61629) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 560.422959] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-2b26d76f-36da-4e8b-b63f-d568919ac270 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.436303] env[61629]: DEBUG nova.compute.provider_tree [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 560.443154] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 560.443154] env[61629]: value = "task-1353961" [ 560.443154] env[61629]: _type = "Task" [ 560.443154] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.450923] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353961, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.683699] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Releasing lock "refresh_cache-abb87186-9951-4fbe-98b2-b595dd4fea12" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.684210] env[61629]: DEBUG nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 560.684210] env[61629]: DEBUG nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 560.684555] env[61629]: DEBUG nova.network.neutron [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 560.720515] env[61629]: DEBUG nova.network.neutron [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 560.823706] env[61629]: INFO nova.scheduler.client.report [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Deleted allocations for instance 20e445dd-663c-46e4-bc0a-f00e68ecd6cd [ 560.941305] env[61629]: DEBUG nova.scheduler.client.report [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 560.960031] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353961, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.024695} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.965949] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] File moved {{(pid=61629) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 560.965949] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Cleaning up location [datastore1] vmware_temp/f7cfb503-c4f6-4cd3-8fa1-3a6cdeffa050 {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 560.965949] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Deleting the datastore file [datastore1] vmware_temp/f7cfb503-c4f6-4cd3-8fa1-3a6cdeffa050 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 560.969916] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6a31c132-9758-4295-926b-2ca303af5228 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.973655] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 560.973655] env[61629]: value = "task-1353962" [ 560.973655] env[61629]: _type = "Task" [ 560.973655] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.984497] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353962, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.022201] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Acquiring lock "4839c06e-f55a-4162-8eae-cfaeae07cdae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.022201] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Lock "4839c06e-f55a-4162-8eae-cfaeae07cdae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.225732] env[61629]: DEBUG nova.network.neutron [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 561.335784] env[61629]: DEBUG oslo_concurrency.lockutils [None req-63eee1be-571b-4de1-9920-756d13e8be10 tempest-ServerDiagnosticsNegativeTest-365223114 tempest-ServerDiagnosticsNegativeTest-365223114-project-member] Lock "20e445dd-663c-46e4-bc0a-f00e68ecd6cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.810s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 561.456105] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.939s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 561.461029] env[61629]: ERROR nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 77a65997-5a5d-45e6-8056-38b717c5802e, please check neutron logs for more information. 
[ 561.461029] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Traceback (most recent call last): [ 561.461029] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 561.461029] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] self.driver.spawn(context, instance, image_meta, [ 561.461029] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 561.461029] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] self._vmops.spawn(context, instance, image_meta, injected_files, [ 561.461029] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 561.461029] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] vm_ref = self.build_virtual_machine(instance, [ 561.461029] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 561.461029] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] vif_infos = vmwarevif.get_vif_info(self._session, [ 561.461029] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 561.461474] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] for vif in network_info: [ 561.461474] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 561.461474] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] return self._sync_wrapper(fn, *args, **kwargs) [ 561.461474] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 561.461474] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] self.wait() [ 561.461474] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 561.461474] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] self[:] = self._gt.wait() [ 561.461474] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 561.461474] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] return self._exit_event.wait() [ 561.461474] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 561.461474] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] result = hub.switch() [ 561.461474] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
561.461474] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] return self.greenlet.switch() [ 561.461794] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 561.461794] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] result = function(*args, **kwargs) [ 561.461794] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 561.461794] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] return func(*args, **kwargs) [ 561.461794] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 561.461794] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] raise e [ 561.461794] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 561.461794] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] nwinfo = self.network_api.allocate_for_instance( [ 561.461794] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 561.461794] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] created_port_ids = self._update_ports_for_instance( [ 561.461794] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 561.461794] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] with excutils.save_and_reraise_exception(): [ 561.461794] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 561.462141] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] self.force_reraise() [ 561.462141] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 561.462141] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] raise self.value [ 561.462141] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 561.462141] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] updated_port = self._update_port( [ 561.462141] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 561.462141] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] _ensure_no_port_binding_failure(port) [ 561.462141] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 561.462141] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] raise exception.PortBindingFailed(port_id=port['id']) [ 561.462141] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] nova.exception.PortBindingFailed: Binding failed for port 77a65997-5a5d-45e6-8056-38b717c5802e, please check neutron logs for more information. [ 561.462141] env[61629]: ERROR nova.compute.manager [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] [ 561.462439] env[61629]: DEBUG nova.compute.utils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Binding failed for port 77a65997-5a5d-45e6-8056-38b717c5802e, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 561.468425] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 9.794s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.470542] env[61629]: DEBUG nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Build of instance cf628773-7dcb-430a-b3ae-a5b62808e279 was re-scheduled: Binding failed for port 77a65997-5a5d-45e6-8056-38b717c5802e, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 561.470542] env[61629]: DEBUG nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 561.470542] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquiring lock "refresh_cache-cf628773-7dcb-430a-b3ae-a5b62808e279" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.470542] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquired lock "refresh_cache-cf628773-7dcb-430a-b3ae-a5b62808e279" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.471014] env[61629]: DEBUG nova.network.neutron [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 561.484147] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353962, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.044837} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.484394] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 561.485108] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25acdd84-8a52-405a-b321-9482b0b6ebe7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.491740] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 561.491740] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]527d923a-384e-14c1-03da-4564efdb3a86" [ 561.491740] env[61629]: _type = "Task" [ 561.491740] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.502426] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquiring lock "dd406dd1-0e19-400b-a862-ae51fd134017" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.502648] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lock "dd406dd1-0e19-400b-a862-ae51fd134017" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.508883] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]527d923a-384e-14c1-03da-4564efdb3a86, 'name': SearchDatastore_Task, 'duration_secs': 0.00891} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.509442] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 561.510014] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 33029a57-19d2-45eb-b4ec-f50c47d3dc12/33029a57-19d2-45eb-b4ec-f50c47d3dc12.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 561.510014] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.510177] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 561.510415] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-be0a1497-d29c-4127-b06d-152724f6b97d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.512823] env[61629]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.MakeDirectory with opID=oslo.vmware-b69b4bdb-47ca-4ce8-b142-36c3553192d5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.523832] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 561.523832] env[61629]: value = "task-1353963" [ 561.523832] env[61629]: _type = "Task" [ 561.523832] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.526587] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 561.528719] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 561.531489] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85439ccd-7bba-49c8-951d-6db7868a9e67 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.540673] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353963, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.542020] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for the task: (returnval){ [ 561.542020] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]526d14c0-7581-cfb7-1449-893de5912069" [ 561.542020] env[61629]: _type = "Task" [ 561.542020] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.551368] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]526d14c0-7581-cfb7-1449-893de5912069, 'name': SearchDatastore_Task, 'duration_secs': 0.007972} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.552320] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e9596da-fc08-45b8-8c73-a4cb1500b0bb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.558777] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for the task: (returnval){ [ 561.558777] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5201f18d-7480-d41b-073d-d4fa5a9e2219" [ 561.558777] env[61629]: _type = "Task" [ 561.558777] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.569802] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5201f18d-7480-d41b-073d-d4fa5a9e2219, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.732374] env[61629]: INFO nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: abb87186-9951-4fbe-98b2-b595dd4fea12] Took 1.05 seconds to deallocate network for instance. [ 561.839897] env[61629]: DEBUG nova.compute.manager [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 562.041315] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353963, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.063282] env[61629]: DEBUG nova.network.neutron [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 562.079042] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5201f18d-7480-d41b-073d-d4fa5a9e2219, 'name': SearchDatastore_Task, 'duration_secs': 0.016669} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.080949] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.080949] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 733343f7-99e2-4e07-94eb-1b66458d799a/733343f7-99e2-4e07-94eb-1b66458d799a.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 562.080949] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-89f62efc-a9e5-425d-873f-5994cf20294e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.089128] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for the task: (returnval){ [ 562.089128] env[61629]: value = "task-1353964" [ 562.089128] env[61629]: _type = "Task" [ 562.089128] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.103208] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353964, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.249064] env[61629]: DEBUG nova.network.neutron [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.364363] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.441351] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-077d7c0b-88d4-4032-a5d1-86c1bed37ee4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.450279] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b496ae3-55d3-46a3-8780-59fff9180e11 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.492252] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed09309c-4512-45a8-bffa-74320661096d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.501669] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1820a90-2751-4d80-951a-fc5705118029 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.516238] env[61629]: DEBUG nova.compute.provider_tree [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 562.539921] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353963, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.546761} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.540209] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 33029a57-19d2-45eb-b4ec-f50c47d3dc12/33029a57-19d2-45eb-b4ec-f50c47d3dc12.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 562.540660] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 562.540660] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5384fa72-3714-4f82-b457-7f862cdffba5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.548753] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 562.548753] env[61629]: value = "task-1353965" [ 562.548753] env[61629]: _type = "Task" [ 562.548753] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.559550] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353965, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.601949] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353964, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.754291] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Releasing lock "refresh_cache-cf628773-7dcb-430a-b3ae-a5b62808e279" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.754559] env[61629]: DEBUG nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 562.754747] env[61629]: DEBUG nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 562.755153] env[61629]: DEBUG nova.network.neutron [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 562.777915] env[61629]: DEBUG nova.network.neutron [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 562.788717] env[61629]: INFO nova.scheduler.client.report [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Deleted allocations for instance abb87186-9951-4fbe-98b2-b595dd4fea12 [ 563.021288] env[61629]: DEBUG nova.scheduler.client.report [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 563.064266] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353965, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097961} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.065286] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 563.066016] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7aa536f-dfff-4fd9-9976-435fa1218faf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.088266] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] 33029a57-19d2-45eb-b4ec-f50c47d3dc12/33029a57-19d2-45eb-b4ec-f50c47d3dc12.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 563.089027] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-439cf1d8-9351-49b0-8c6e-f9212c0bf860 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.114695] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353964, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519567} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.115962] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 733343f7-99e2-4e07-94eb-1b66458d799a/733343f7-99e2-4e07-94eb-1b66458d799a.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 563.116212] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 563.116598] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 563.116598] env[61629]: value = "task-1353966" [ 563.116598] env[61629]: _type = "Task" [ 563.116598] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.116757] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9d2eebbc-26f6-4c0c-93c2-58289513dedf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.129121] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353966, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.129323] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for the task: (returnval){ [ 563.129323] env[61629]: value = "task-1353967" [ 563.129323] env[61629]: _type = "Task" [ 563.129323] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.137471] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353967, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.280725] env[61629]: DEBUG nova.network.neutron [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.300219] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "abb87186-9951-4fbe-98b2-b595dd4fea12" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.419s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 563.528053] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.062s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 563.528767] env[61629]: ERROR nova.compute.manager [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 55b2c3cb-0b0e-4df8-a6df-314f7cf77caa, please check neutron logs for more information. 
[ 563.528767] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Traceback (most recent call last): [ 563.528767] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 563.528767] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] self.driver.spawn(context, instance, image_meta, [ 563.528767] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 563.528767] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 563.528767] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 563.528767] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] vm_ref = self.build_virtual_machine(instance, [ 563.528767] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 563.528767] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] vif_infos = vmwarevif.get_vif_info(self._session, [ 563.528767] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 563.529091] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] for vif in network_info: [ 563.529091] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 563.529091] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] return self._sync_wrapper(fn, *args, **kwargs) [ 563.529091] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 563.529091] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] self.wait() [ 563.529091] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 563.529091] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] self[:] = self._gt.wait() [ 563.529091] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 563.529091] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] return self._exit_event.wait() [ 563.529091] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 563.529091] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] result = hub.switch() [ 563.529091] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
563.529091] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] return self.greenlet.switch() [ 563.529534] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 563.529534] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] result = function(*args, **kwargs) [ 563.529534] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 563.529534] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] return func(*args, **kwargs) [ 563.529534] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 563.529534] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] raise e [ 563.529534] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 563.529534] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] nwinfo = self.network_api.allocate_for_instance( [ 563.529534] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 563.529534] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] created_port_ids = self._update_ports_for_instance( [ 563.529534] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 563.529534] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] with excutils.save_and_reraise_exception(): [ 563.529534] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 563.529908] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] self.force_reraise() [ 563.529908] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 563.529908] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] raise self.value [ 563.529908] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 563.529908] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] updated_port = self._update_port( [ 563.529908] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 563.529908] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] _ensure_no_port_binding_failure(port) [ 563.529908] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 563.529908] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] raise exception.PortBindingFailed(port_id=port['id']) [ 563.529908] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] nova.exception.PortBindingFailed: Binding failed for port 55b2c3cb-0b0e-4df8-a6df-314f7cf77caa, please check neutron logs for more information. [ 563.529908] env[61629]: ERROR nova.compute.manager [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] [ 563.530257] env[61629]: DEBUG nova.compute.utils [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Binding failed for port 55b2c3cb-0b0e-4df8-a6df-314f7cf77caa, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 563.530923] env[61629]: DEBUG oslo_concurrency.lockutils [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 10.802s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.534042] env[61629]: DEBUG nova.compute.manager [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Build of instance bfbff392-0dc0-47c7-ae58-22d922638ac8 was re-scheduled: Binding failed for port 55b2c3cb-0b0e-4df8-a6df-314f7cf77caa, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 563.535061] env[61629]: DEBUG nova.compute.manager [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 563.535364] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Acquiring lock "refresh_cache-bfbff392-0dc0-47c7-ae58-22d922638ac8" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 563.535364] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Acquired lock "refresh_cache-bfbff392-0dc0-47c7-ae58-22d922638ac8" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 563.535520] env[61629]: DEBUG nova.network.neutron [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 563.633996] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353966, 'name': ReconfigVM_Task, 'duration_secs': 0.28334} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.641680] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Reconfigured VM instance instance-0000000a to attach disk [datastore1] 33029a57-19d2-45eb-b4ec-f50c47d3dc12/33029a57-19d2-45eb-b4ec-f50c47d3dc12.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 563.642677] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-37123eca-706d-4677-8e1d-f88d9536dbe9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.650018] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353967, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069509} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.650822] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 563.651416] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 563.651416] env[61629]: value = "task-1353968" [ 563.651416] env[61629]: _type = "Task" [ 563.651416] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.651857] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca6fcf4-7747-4948-ac28-3c2837fdc530 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.675508] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] 733343f7-99e2-4e07-94eb-1b66458d799a/733343f7-99e2-4e07-94eb-1b66458d799a.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 563.682031] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0d1068b-d7de-4b3d-8ead-c482d2b8a538 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.693425] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353968, 'name': Rename_Task} progress is 10%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.700198] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for the task: (returnval){ [ 563.700198] env[61629]: value = "task-1353969" [ 563.700198] env[61629]: _type = "Task" [ 563.700198] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.709422] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353969, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.784338] env[61629]: INFO nova.compute.manager [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: cf628773-7dcb-430a-b3ae-a5b62808e279] Took 1.03 seconds to deallocate network for instance. 
[ 563.801992] env[61629]: DEBUG nova.compute.manager [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 563.961630] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquiring lock "2315bd37-6151-42d7-8b54-9ee367be0ed1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.962045] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lock "2315bd37-6151-42d7-8b54-9ee367be0ed1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.080636] env[61629]: DEBUG nova.network.neutron [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 564.171265] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353968, 'name': Rename_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.210266] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353969, 'name': ReconfigVM_Task, 'duration_secs': 0.40034} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 564.213199] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Reconfigured VM instance instance-0000000b to attach disk [datastore1] 733343f7-99e2-4e07-94eb-1b66458d799a/733343f7-99e2-4e07-94eb-1b66458d799a.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 564.213919] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-43759d7a-d2b8-4051-b27d-b3f6bdc20030 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.221912] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for the task: (returnval){ [ 564.221912] env[61629]: value = "task-1353970" [ 564.221912] env[61629]: _type = "Task" [ 564.221912] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 564.235085] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353970, 'name': Rename_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.264671] env[61629]: DEBUG nova.network.neutron [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.331983] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.421606] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b70bc1-4608-418a-a6c9-992d456180f4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.428895] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f9c6d1-7aec-4ef0-a805-1bd7093e8928 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.465272] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dca8156-0d70-4e79-8ac5-23e438256dd9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.473202] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29aa18ed-2c76-4bad-9d84-b3e199cf4fc8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.488517] env[61629]: DEBUG nova.compute.provider_tree [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 564.678379] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353968, 'name': Rename_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.733345] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353970, 'name': Rename_Task, 'duration_secs': 0.227788} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 564.733405] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 564.733635] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-34b347f0-b309-4a4f-b4de-bc2500fb4352 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.744846] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for the task: (returnval){ [ 564.744846] env[61629]: value = "task-1353971" [ 564.744846] env[61629]: _type = "Task" [ 564.744846] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 564.758108] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353971, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.768848] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Releasing lock "refresh_cache-bfbff392-0dc0-47c7-ae58-22d922638ac8" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 564.768983] env[61629]: DEBUG nova.compute.manager [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 564.769345] env[61629]: DEBUG nova.compute.manager [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 564.769589] env[61629]: DEBUG nova.network.neutron [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 564.813107] env[61629]: DEBUG nova.network.neutron [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 564.833576] env[61629]: INFO nova.scheduler.client.report [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Deleted allocations for instance cf628773-7dcb-430a-b3ae-a5b62808e279 [ 564.992991] env[61629]: DEBUG nova.scheduler.client.report [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 565.173968] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353968, 'name': Rename_Task, 'duration_secs': 1.39626} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 565.174332] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 565.175102] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-db672643-1ced-4e5c-ad90-6a1f1b81bb34 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.183566] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 565.183566] env[61629]: value = "task-1353972" [ 565.183566] env[61629]: _type = "Task" [ 565.183566] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 565.194357] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353972, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.262578] env[61629]: DEBUG oslo_vmware.api [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353971, 'name': PowerOnVM_Task, 'duration_secs': 0.478795} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 565.262578] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 565.262578] env[61629]: INFO nova.compute.manager [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Took 7.76 seconds to spawn the instance on the hypervisor. [ 565.262578] env[61629]: DEBUG nova.compute.manager [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 565.263883] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41920248-363d-4988-beaf-542abdb06cd4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.317080] env[61629]: DEBUG nova.network.neutron [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.346562] env[61629]: DEBUG oslo_concurrency.lockutils [None req-037a492d-b6fb-4126-8de9-c8b2ff20eedf tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "cf628773-7dcb-430a-b3ae-a5b62808e279" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.405s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 565.500260] env[61629]: DEBUG oslo_concurrency.lockutils [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.969s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 565.500881] env[61629]: ERROR nova.compute.manager [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1ad574ca-30cb-485b-b57a-83736bdfbe6d, please check neutron logs for more information. 
[ 565.500881] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Traceback (most recent call last): [ 565.500881] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 565.500881] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] self.driver.spawn(context, instance, image_meta, [ 565.500881] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 565.500881] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] self._vmops.spawn(context, instance, image_meta, injected_files, [ 565.500881] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 565.500881] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] vm_ref = self.build_virtual_machine(instance, [ 565.500881] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 565.500881] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] vif_infos = vmwarevif.get_vif_info(self._session, [ 565.500881] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 565.501268] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] for vif in network_info: [ 565.501268] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 565.501268] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] return self._sync_wrapper(fn, *args, **kwargs) [ 565.501268] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 565.501268] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] self.wait() [ 565.501268] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 565.501268] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] self[:] = self._gt.wait() [ 565.501268] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 565.501268] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] return self._exit_event.wait() [ 565.501268] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 565.501268] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] result = hub.switch() [ 565.501268] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
565.501268] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] return self.greenlet.switch() [ 565.501646] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 565.501646] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] result = function(*args, **kwargs) [ 565.501646] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 565.501646] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] return func(*args, **kwargs) [ 565.501646] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 565.501646] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] raise e [ 565.501646] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 565.501646] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] nwinfo = self.network_api.allocate_for_instance( [ 565.501646] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 565.501646] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] created_port_ids = self._update_ports_for_instance( [ 565.501646] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 565.501646] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] with excutils.save_and_reraise_exception(): [ 565.501646] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 565.502023] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] self.force_reraise() [ 565.502023] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 565.502023] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] raise self.value [ 565.502023] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 565.502023] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] updated_port = self._update_port( [ 565.502023] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 565.502023] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] _ensure_no_port_binding_failure(port) [ 565.502023] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 565.502023] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] raise exception.PortBindingFailed(port_id=port['id']) [ 565.502023] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] nova.exception.PortBindingFailed: Binding failed for port 1ad574ca-30cb-485b-b57a-83736bdfbe6d, please check neutron logs for more information. [ 565.502023] env[61629]: ERROR nova.compute.manager [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] [ 565.502310] env[61629]: DEBUG nova.compute.utils [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Binding failed for port 1ad574ca-30cb-485b-b57a-83736bdfbe6d, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 565.503707] env[61629]: DEBUG nova.compute.manager [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Build of instance d29660cc-47f8-4ca5-b21f-bcfd945abc03 was re-scheduled: Binding failed for port 1ad574ca-30cb-485b-b57a-83736bdfbe6d, please check neutron logs for more information. {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 565.504176] env[61629]: DEBUG nova.compute.manager [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 565.504422] env[61629]: DEBUG oslo_concurrency.lockutils [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "refresh_cache-d29660cc-47f8-4ca5-b21f-bcfd945abc03" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 565.504566] env[61629]: DEBUG oslo_concurrency.lockutils [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquired lock "refresh_cache-d29660cc-47f8-4ca5-b21f-bcfd945abc03" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 565.504720] env[61629]: DEBUG nova.network.neutron [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 565.505783] env[61629]: DEBUG oslo_concurrency.lockutils [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.136s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.699049] env[61629]: DEBUG oslo_vmware.api [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353972, 'name': PowerOnVM_Task, 
'duration_secs': 0.429668} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 565.699049] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 565.699049] env[61629]: INFO nova.compute.manager [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Took 12.57 seconds to spawn the instance on the hypervisor. [ 565.699049] env[61629]: DEBUG nova.compute.manager [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 565.700240] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8202e577-8875-452e-81df-ed5f949ce8ac {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.786289] env[61629]: INFO nova.compute.manager [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Took 26.46 seconds to build instance. [ 565.824148] env[61629]: INFO nova.compute.manager [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] [instance: bfbff392-0dc0-47c7-ae58-22d922638ac8] Took 1.05 seconds to deallocate network for instance. [ 565.851385] env[61629]: DEBUG nova.compute.manager [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 566.060363] env[61629]: DEBUG nova.network.neutron [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 566.226678] env[61629]: INFO nova.compute.manager [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Took 28.58 seconds to build instance. 
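The "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" lines throughout this section come from oslo.concurrency's lockutils, which wraps the protected call and logs the wait and hold times at DEBUG. A small sketch of the two forms used here, with placeholder function bodies:

    from oslo_concurrency import lockutils

    # Per-instance lock, as taken around _locked_do_build_and_run_instance:
    # the lock name is the instance UUID, so concurrent operations on the
    # same instance serialize while different instances proceed in parallel.
    with lockutils.lock('bfbff392-0dc0-47c7-ae58-22d922638ac8'):
        pass  # placeholder for the protected build/cleanup work

    # Shared named lock, as used for "compute_resources" by the resource
    # tracker (instance_claim / abort_instance_claim): the decorator holds
    # the lock for the duration of each call and logs waited/held durations.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass  # placeholder for claiming CPU/RAM/disk on the host

Long "waited" times on a lock (for example the 11.136s and 13.086s waits on "compute_resources" in this section) therefore indicate contention for the lock, not that the protected work itself was slow.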
[ 566.289737] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b6f0ab1e-2301-4417-99ec-7510421de4aa tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Lock "733343f7-99e2-4e07-94eb-1b66458d799a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.074s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.376200] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.383763] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cae9faa-4c5e-4c0b-8523-0457aebb2da9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.392040] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d5d0fdc-90c8-4efa-b115-4c70e7981851 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.423982] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1550ff3-41a1-476f-8fdc-1b30dc9f4a17 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.435331] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097c6abb-45ba-4b54-aed2-e39c9e7da1c6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.447584] env[61629]: DEBUG nova.compute.provider_tree [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 566.635675] env[61629]: DEBUG nova.network.neutron [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.730558] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b178dce-acc2-4c68-8d68-bf9b8f5aef09 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Lock "33029a57-19d2-45eb-b4ec-f50c47d3dc12" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.094s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.795010] env[61629]: INFO nova.compute.manager [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Rebuilding instance [ 566.798231] env[61629]: DEBUG nova.compute.manager [None 
req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 566.877700] env[61629]: INFO nova.scheduler.client.report [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Deleted allocations for instance bfbff392-0dc0-47c7-ae58-22d922638ac8 [ 566.884283] env[61629]: DEBUG nova.compute.manager [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 566.888311] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de53e00-2180-4223-a9e0-27c2b96206bc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.952046] env[61629]: DEBUG nova.scheduler.client.report [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 567.139833] env[61629]: DEBUG oslo_concurrency.lockutils [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Releasing lock "refresh_cache-d29660cc-47f8-4ca5-b21f-bcfd945abc03" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.140084] env[61629]: DEBUG nova.compute.manager [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 567.140275] env[61629]: DEBUG nova.compute.manager [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 567.140439] env[61629]: DEBUG nova.network.neutron [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 567.235370] env[61629]: DEBUG nova.compute.manager [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 567.259698] env[61629]: DEBUG nova.network.neutron [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 567.334127] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.392460] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d0b9718-9f4b-4d17-9f49-9aa5a974666e tempest-ServerDiagnosticsTest-2041675382 tempest-ServerDiagnosticsTest-2041675382-project-member] Lock "bfbff392-0dc0-47c7-ae58-22d922638ac8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.027s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.401806] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 567.402024] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7e585f0b-a0f4-4293-8657-0ee37596ac87 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.414628] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for the task: (returnval){ [ 567.414628] env[61629]: value = "task-1353973" [ 567.414628] env[61629]: _type = "Task" [ 567.414628] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.425683] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353973, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.464025] env[61629]: DEBUG oslo_concurrency.lockutils [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.956s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.464025] env[61629]: ERROR nova.compute.manager [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9f109052-c649-4c70-b87d-33df40b955c4, please check neutron logs for more information. [ 567.464025] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Traceback (most recent call last): [ 567.464025] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 567.464025] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] self.driver.spawn(context, instance, image_meta, [ 567.464025] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 567.464025] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] self._vmops.spawn(context, instance, image_meta, injected_files, [ 567.464025] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 567.464025] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] vm_ref = self.build_virtual_machine(instance, [ 567.464381] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 567.464381] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] vif_infos = vmwarevif.get_vif_info(self._session, [ 567.464381] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 567.464381] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] for vif in network_info: [ 567.464381] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 567.464381] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] return self._sync_wrapper(fn, *args, **kwargs) [ 567.464381] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File 
"/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 567.464381] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] self.wait() [ 567.464381] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 567.464381] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] self[:] = self._gt.wait() [ 567.464381] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 567.464381] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] return self._exit_event.wait() [ 567.464381] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 567.464710] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] result = hub.switch() [ 567.464710] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 567.464710] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] return self.greenlet.switch() [ 567.464710] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 567.464710] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] result = function(*args, **kwargs) [ 567.464710] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 567.464710] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] return func(*args, **kwargs) [ 567.464710] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 567.464710] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] raise e [ 567.464710] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 567.464710] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] nwinfo = self.network_api.allocate_for_instance( [ 567.464710] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 567.464710] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] created_port_ids = self._update_ports_for_instance( [ 567.465060] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 567.465060] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] with excutils.save_and_reraise_exception(): [ 567.465060] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 567.465060] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] self.force_reraise() [ 567.465060] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 567.465060] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] raise self.value [ 567.465060] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 567.465060] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] updated_port = self._update_port( [ 567.465060] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 567.465060] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] _ensure_no_port_binding_failure(port) [ 567.465060] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 567.465060] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] raise exception.PortBindingFailed(port_id=port['id']) [ 567.465362] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] nova.exception.PortBindingFailed: Binding failed for port 9f109052-c649-4c70-b87d-33df40b955c4, please check neutron logs for more information. [ 567.465362] env[61629]: ERROR nova.compute.manager [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] [ 567.465362] env[61629]: DEBUG nova.compute.utils [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Binding failed for port 9f109052-c649-4c70-b87d-33df40b955c4, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 567.467011] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.086s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.472491] env[61629]: DEBUG nova.compute.manager [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Build of instance f128e0a7-f67b-4800-bfd6-ec65c5042460 was re-scheduled: Binding failed for port 9f109052-c649-4c70-b87d-33df40b955c4, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 567.472491] env[61629]: DEBUG nova.compute.manager [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 567.472491] env[61629]: DEBUG oslo_concurrency.lockutils [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Acquiring lock "refresh_cache-f128e0a7-f67b-4800-bfd6-ec65c5042460" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.472491] env[61629]: DEBUG oslo_concurrency.lockutils [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Acquired lock "refresh_cache-f128e0a7-f67b-4800-bfd6-ec65c5042460" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.472692] env[61629]: DEBUG nova.network.neutron [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 567.764024] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.764461] env[61629]: DEBUG nova.network.neutron [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.895739] env[61629]: DEBUG nova.compute.manager [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 567.927375] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353973, 'name': PowerOffVM_Task, 'duration_secs': 0.124913} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 567.927375] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 567.927375] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 567.927783] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19aa6f53-b93f-44a5-a833-7d9b2224ad14 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.935908] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 567.937016] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e560b301-63a8-4902-81c4-6045722546dc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.961286] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 567.961514] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 567.961695] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Deleting the datastore file [datastore1] 733343f7-99e2-4e07-94eb-1b66458d799a {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 567.962752] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-78f16c68-7294-401c-87e1-749bd356184c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.971258] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for the task: (returnval){ [ 567.971258] env[61629]: value = "task-1353975" [ 567.971258] env[61629]: _type = "Task" [ 567.971258] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.989056] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353975, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.041991] env[61629]: DEBUG nova.network.neutron [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 568.269239] env[61629]: INFO nova.compute.manager [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: d29660cc-47f8-4ca5-b21f-bcfd945abc03] Took 1.13 seconds to deallocate network for instance. [ 568.321855] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc9fa58-8cc7-47dc-b534-768f31dd1e26 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.330692] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-881e7bd6-12e3-4666-b8ed-dbad1af32d7a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.360446] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a7ddd8-5f28-4808-a20c-07396d647a25 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.368094] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3056cfe-4491-4932-992c-e3964e8526b3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.381528] env[61629]: DEBUG nova.compute.provider_tree [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 568.418477] env[61629]: DEBUG oslo_concurrency.lockutils [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.457388] env[61629]: DEBUG nova.network.neutron [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.486034] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 
tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353975, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132565} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 568.486034] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 568.486034] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 568.486034] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 568.885087] env[61629]: DEBUG nova.scheduler.client.report [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 568.963193] env[61629]: DEBUG oslo_concurrency.lockutils [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Releasing lock "refresh_cache-f128e0a7-f67b-4800-bfd6-ec65c5042460" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.963193] env[61629]: DEBUG nova.compute.manager [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 568.963193] env[61629]: DEBUG nova.compute.manager [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 568.963193] env[61629]: DEBUG nova.network.neutron [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 569.001978] env[61629]: DEBUG nova.network.neutron [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 569.315452] env[61629]: INFO nova.scheduler.client.report [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Deleted allocations for instance d29660cc-47f8-4ca5-b21f-bcfd945abc03 [ 569.393787] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.927s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.394883] env[61629]: ERROR nova.compute.manager [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f4597226-4835-4be9-97ae-427377d35bcf, please check neutron logs for more information. 
[ 569.394883] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Traceback (most recent call last): [ 569.394883] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 569.394883] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] self.driver.spawn(context, instance, image_meta, [ 569.394883] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 569.394883] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 569.394883] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 569.394883] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] vm_ref = self.build_virtual_machine(instance, [ 569.394883] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 569.394883] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] vif_infos = vmwarevif.get_vif_info(self._session, [ 569.394883] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 569.395276] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] for vif in network_info: [ 569.395276] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 569.395276] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] return self._sync_wrapper(fn, *args, **kwargs) [ 569.395276] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 569.395276] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] self.wait() [ 569.395276] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 569.395276] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] self[:] = self._gt.wait() [ 569.395276] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 569.395276] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] return self._exit_event.wait() [ 569.395276] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 569.395276] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] result = hub.switch() [ 569.395276] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
569.395276] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] return self.greenlet.switch() [ 569.395637] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 569.395637] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] result = function(*args, **kwargs) [ 569.395637] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 569.395637] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] return func(*args, **kwargs) [ 569.395637] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 569.395637] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] raise e [ 569.395637] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 569.395637] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] nwinfo = self.network_api.allocate_for_instance( [ 569.395637] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 569.395637] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] created_port_ids = self._update_ports_for_instance( [ 569.395637] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 569.395637] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] with excutils.save_and_reraise_exception(): [ 569.395637] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 569.395992] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] self.force_reraise() [ 569.395992] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 569.395992] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] raise self.value [ 569.395992] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 569.395992] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] updated_port = self._update_port( [ 569.395992] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 569.395992] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] _ensure_no_port_binding_failure(port) [ 569.395992] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 569.395992] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] raise exception.PortBindingFailed(port_id=port['id']) [ 569.395992] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] nova.exception.PortBindingFailed: Binding failed for port f4597226-4835-4be9-97ae-427377d35bcf, please check neutron logs for more information. [ 569.395992] env[61629]: ERROR nova.compute.manager [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] [ 569.396316] env[61629]: DEBUG nova.compute.utils [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Binding failed for port f4597226-4835-4be9-97ae-427377d35bcf, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 569.398558] env[61629]: DEBUG oslo_concurrency.lockutils [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.800s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.398558] env[61629]: DEBUG nova.objects.instance [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Lazy-loading 'resources' on Instance uuid 113fe8e6-bc12-41fe-a405-cec2aa1a717e {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 569.404706] env[61629]: DEBUG nova.compute.manager [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Build of instance 1a756eed-d5f2-4135-b522-ed06e20da1bc was re-scheduled: Binding failed for port f4597226-4835-4be9-97ae-427377d35bcf, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 569.404706] env[61629]: DEBUG nova.compute.manager [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 569.404947] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "refresh_cache-1a756eed-d5f2-4135-b522-ed06e20da1bc" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.405112] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquired lock "refresh_cache-1a756eed-d5f2-4135-b522-ed06e20da1bc" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.406299] env[61629]: DEBUG nova.network.neutron [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 569.507784] env[61629]: DEBUG nova.network.neutron [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.535018] env[61629]: DEBUG nova.virt.hardware [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 569.535018] env[61629]: DEBUG nova.virt.hardware [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 569.535018] env[61629]: DEBUG nova.virt.hardware [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] 
Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 569.535018] env[61629]: DEBUG nova.virt.hardware [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 569.535264] env[61629]: DEBUG nova.virt.hardware [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 569.537555] env[61629]: DEBUG nova.virt.hardware [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 569.537555] env[61629]: DEBUG nova.virt.hardware [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 569.537555] env[61629]: DEBUG nova.virt.hardware [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 569.537555] env[61629]: DEBUG nova.virt.hardware [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 569.537555] env[61629]: DEBUG nova.virt.hardware [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 569.537869] env[61629]: DEBUG nova.virt.hardware [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 569.538919] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0ec036-2713-4a99-a2e1-65b90e4feac4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.548459] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e62db11-02cd-4756-b044-e3084faf4509 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.564079] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 
733343f7-99e2-4e07-94eb-1b66458d799a] Instance VIF info [] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 569.571187] env[61629]: DEBUG oslo.service.loopingcall [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 569.571277] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 569.571471] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1df04ded-416a-4e19-a538-61818bb382da {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.591796] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 569.591796] env[61629]: value = "task-1353976" [ 569.591796] env[61629]: _type = "Task" [ 569.591796] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.599513] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1353976, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.632011] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquiring lock "be2db738-cfe9-4720-b348-c7b03f28e96b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.632362] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "be2db738-cfe9-4720-b348-c7b03f28e96b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.699952] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquiring lock "6ba7ca7d-173d-41d3-b523-3548a67397c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.700224] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "6ba7ca7d-173d-41d3-b523-3548a67397c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.705427] env[61629]: INFO nova.compute.manager [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 
tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Rebuilding instance [ 569.772254] env[61629]: DEBUG nova.compute.manager [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 569.773172] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-427fc5bd-fff5-4159-84ab-0b5bf7606391 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.831858] env[61629]: DEBUG oslo_concurrency.lockutils [None req-028e3c42-3ab1-4376-aa90-ddb8b45a80df tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "d29660cc-47f8-4ca5-b21f-bcfd945abc03" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.746s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.993999] env[61629]: DEBUG nova.network.neutron [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 570.017103] env[61629]: INFO nova.compute.manager [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: f128e0a7-f67b-4800-bfd6-ec65c5042460] Took 1.06 seconds to deallocate network for instance. [ 570.114718] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1353976, 'name': CreateVM_Task, 'duration_secs': 0.258764} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.114918] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 570.115369] env[61629]: DEBUG oslo_concurrency.lockutils [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.115538] env[61629]: DEBUG oslo_concurrency.lockutils [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.115822] env[61629]: DEBUG oslo_concurrency.lockutils [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 570.116083] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d35425b6-e23f-4c53-81af-fcae064f0203 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.123116] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for the task: (returnval){ [ 570.123116] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52059e4a-0303-9275-fe98-fe3d1c5694cc" [ 570.123116] env[61629]: _type = "Task" [ 570.123116] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.134830] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52059e4a-0303-9275-fe98-fe3d1c5694cc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.286805] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 570.290134] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-338bf70d-2fd9-4fac-a3f9-9e4917736109 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.300036] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 570.300036] env[61629]: value = "task-1353977" [ 570.300036] env[61629]: _type = "Task" [ 570.300036] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.310930] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353977, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.312478] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d24de4-3d5e-46ac-8997-095e70e9c9f3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.321285] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-795ea564-8543-4c13-84e4-5f5e3a470739 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.355171] env[61629]: DEBUG nova.compute.manager [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Starting instance... 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 570.358724] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44aca545-f829-4661-8caf-36838a241567 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.375022] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e8a1dd-e561-4fad-930b-58fbd4f2e74b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.387423] env[61629]: DEBUG nova.compute.provider_tree [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 570.585234] env[61629]: DEBUG nova.network.neutron [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.635959] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52059e4a-0303-9275-fe98-fe3d1c5694cc, 'name': SearchDatastore_Task, 'duration_secs': 0.031991} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.636362] env[61629]: DEBUG oslo_concurrency.lockutils [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.636633] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 570.637133] env[61629]: DEBUG oslo_concurrency.lockutils [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.638017] env[61629]: DEBUG oslo_concurrency.lockutils [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.638225] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 570.638510] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a48da4e7-104e-4e17-9a14-bfbdcfa2ef28 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.647144] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 570.647341] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 570.648259] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0549b937-0c25-494a-9540-3c71c135c591 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.656128] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for the task: (returnval){ [ 570.656128] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5258901d-ba5b-9a7a-5c00-60607bbe1d05" [ 570.656128] env[61629]: _type = "Task" [ 570.656128] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.667526] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5258901d-ba5b-9a7a-5c00-60607bbe1d05, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.811992] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353977, 'name': PowerOffVM_Task, 'duration_secs': 0.134717} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.812246] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 570.812572] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 570.813770] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b096d3c-6c41-425e-80cf-59ede0898df1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.822237] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 570.822749] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-beaa9dbe-32ef-46eb-9b8c-975d344e7256 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.849245] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Unregistered the VM 
{{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 570.849550] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 570.849741] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Deleting the datastore file [datastore1] 33029a57-19d2-45eb-b4ec-f50c47d3dc12 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 570.850018] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fa5d039f-3bbf-45eb-8436-e28503a5ebeb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.859323] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 570.859323] env[61629]: value = "task-1353979" [ 570.859323] env[61629]: _type = "Task" [ 570.859323] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.873911] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353979, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.892267] env[61629]: DEBUG nova.scheduler.client.report [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 570.900954] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.087626] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Releasing lock "refresh_cache-1a756eed-d5f2-4135-b522-ed06e20da1bc" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.088974] env[61629]: DEBUG nova.compute.manager [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 571.088974] env[61629]: DEBUG nova.compute.manager [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 571.088974] env[61629]: DEBUG nova.network.neutron [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 571.134724] env[61629]: DEBUG nova.network.neutron [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 571.171993] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5258901d-ba5b-9a7a-5c00-60607bbe1d05, 'name': SearchDatastore_Task, 'duration_secs': 0.008469} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.172838] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfe46e4b-a6c9-44a9-a062-211d07b783f0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.183766] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for the task: (returnval){ [ 571.183766] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5280ecff-7f71-a8da-6703-5e35e9f8450b" [ 571.183766] env[61629]: _type = "Task" [ 571.183766] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.190033] env[61629]: INFO nova.scheduler.client.report [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Deleted allocations for instance f128e0a7-f67b-4800-bfd6-ec65c5042460 [ 571.207451] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5280ecff-7f71-a8da-6703-5e35e9f8450b, 'name': SearchDatastore_Task, 'duration_secs': 0.009647} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.207516] env[61629]: DEBUG oslo_concurrency.lockutils [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.207940] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 733343f7-99e2-4e07-94eb-1b66458d799a/733343f7-99e2-4e07-94eb-1b66458d799a.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 571.211534] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5f726c06-c7b7-4e56-8aa6-9f1454e1200b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.217212] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for the task: (returnval){ [ 571.217212] env[61629]: value = "task-1353980" [ 571.217212] env[61629]: _type = "Task" [ 571.217212] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.229599] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353980, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.374095] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353979, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.111275} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.374369] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 571.374549] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 571.374716] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 571.402025] env[61629]: DEBUG oslo_concurrency.lockutils [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.004s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.405786] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 13.749s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.438979] env[61629]: INFO nova.scheduler.client.report [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Deleted allocations for instance 113fe8e6-bc12-41fe-a405-cec2aa1a717e [ 571.641114] env[61629]: DEBUG nova.network.neutron [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.709525] env[61629]: DEBUG oslo_concurrency.lockutils [None req-598db7da-b376-4194-8d83-cd7c88568f83 tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Lock 
"f128e0a7-f67b-4800-bfd6-ec65c5042460" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.259s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.734578] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353980, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486441} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.735340] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 733343f7-99e2-4e07-94eb-1b66458d799a/733343f7-99e2-4e07-94eb-1b66458d799a.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 571.735670] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 571.736098] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d73f9592-c725-4da9-b9b4-ed535dcb0085 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.745651] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for the task: (returnval){ [ 571.745651] env[61629]: value = "task-1353981" [ 571.745651] env[61629]: _type = "Task" [ 571.745651] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.756555] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353981, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.954859] env[61629]: DEBUG oslo_concurrency.lockutils [None req-638d81f6-87a9-4fe0-bccc-ae3d7c3f7daa tempest-ServerDiagnosticsV248Test-513035346 tempest-ServerDiagnosticsV248Test-513035346-project-member] Lock "113fe8e6-bc12-41fe-a405-cec2aa1a717e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.526s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.143855] env[61629]: INFO nova.compute.manager [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 1a756eed-d5f2-4135-b522-ed06e20da1bc] Took 1.06 seconds to deallocate network for instance. 
[ 572.221019] env[61629]: DEBUG nova.compute.manager [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 572.258902] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353981, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064339} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.264109] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 572.264109] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee27c7b8-da04-46bf-a216-dab2dd93ece9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.289193] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] 733343f7-99e2-4e07-94eb-1b66458d799a/733343f7-99e2-4e07-94eb-1b66458d799a.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 572.289362] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-019a60b2-00f0-4b8e-8f6c-a4a3ebe90f2a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.312169] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for the task: (returnval){ [ 572.312169] env[61629]: value = "task-1353982" [ 572.312169] env[61629]: _type = "Task" [ 572.312169] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.321946] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353982, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.421459] env[61629]: DEBUG nova.virt.hardware [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 572.421596] env[61629]: DEBUG nova.virt.hardware [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 572.421708] env[61629]: DEBUG nova.virt.hardware [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 572.421888] env[61629]: DEBUG nova.virt.hardware [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 572.422045] env[61629]: DEBUG nova.virt.hardware [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 572.422326] env[61629]: DEBUG nova.virt.hardware [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 572.422437] env[61629]: DEBUG nova.virt.hardware [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 572.422591] env[61629]: DEBUG nova.virt.hardware [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 572.422756] env[61629]: DEBUG nova.virt.hardware [None req-8184eb22-a055-4801-80af-47a9466b6520 
tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 572.423505] env[61629]: DEBUG nova.virt.hardware [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 572.423764] env[61629]: DEBUG nova.virt.hardware [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 572.424590] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fede1f18-4bd4-4118-8185-0c2c45d12f8c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.434293] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ab34ce-52da-4ef9-9b47-1bfac862a392 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.454169] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Instance VIF info [] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 572.461678] env[61629]: DEBUG oslo.service.loopingcall [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 572.464124] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 572.464600] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-512ea6be-0cff-4b5a-83ec-4c92b1d73979 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.487079] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 572.487079] env[61629]: value = "task-1353983" [ 572.487079] env[61629]: _type = "Task" [ 572.487079] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.499150] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1353983, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.750704] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.822338] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353982, 'name': ReconfigVM_Task, 'duration_secs': 0.262517} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.824894] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Reconfigured VM instance instance-0000000b to attach disk [datastore2] 733343f7-99e2-4e07-94eb-1b66458d799a/733343f7-99e2-4e07-94eb-1b66458d799a.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 572.824894] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0cc5421c-f493-4faf-8567-93c42da9e3cf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.831172] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for the task: (returnval){ [ 572.831172] env[61629]: value = "task-1353984" [ 572.831172] env[61629]: _type = "Task" [ 572.831172] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.843236] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353984, 'name': Rename_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.979985] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 1a756eed-d5f2-4135-b522-ed06e20da1bc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 572.980169] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 33029a57-19d2-45eb-b4ec-f50c47d3dc12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 572.980347] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 733343f7-99e2-4e07-94eb-1b66458d799a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 573.001316] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1353983, 'name': CreateVM_Task, 'duration_secs': 0.323827} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.001518] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 573.001963] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.002203] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.002490] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 573.002751] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3a69d70-34f8-4e4c-9479-46744342bad6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.013069] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 573.013069] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]523129d3-ff9d-6157-b9d1-7dc6ea9252a7" [ 573.013069] env[61629]: _type = "Task" [ 573.013069] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.023157] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]523129d3-ff9d-6157-b9d1-7dc6ea9252a7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.198758] env[61629]: INFO nova.scheduler.client.report [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Deleted allocations for instance 1a756eed-d5f2-4135-b522-ed06e20da1bc [ 573.348101] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353984, 'name': Rename_Task, 'duration_secs': 0.156664} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.349963] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 573.349963] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e25ea62-576d-441c-b8c3-cb4d11eb8c3f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.361399] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for the task: (returnval){ [ 573.361399] env[61629]: value = "task-1353985" [ 573.361399] env[61629]: _type = "Task" [ 573.361399] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.371406] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353985, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.485777] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 62d7c997-cd38-43f5-a571-78a055ad05f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 573.527271] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]523129d3-ff9d-6157-b9d1-7dc6ea9252a7, 'name': SearchDatastore_Task, 'duration_secs': 0.009053} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.527623] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.529079] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 573.529143] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.532141] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.532141] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 573.532141] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-659c5fce-4d1a-45d8-9ec8-dd0682a2e618 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.538678] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 573.540301] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 573.540301] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17026b24-6c0f-400a-a02b-9d7c106ff4b0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.545585] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 573.545585] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5297e057-068f-70e1-7ec9-c61a54670575" [ 573.545585] env[61629]: _type = "Task" [ 573.545585] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.560261] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5297e057-068f-70e1-7ec9-c61a54670575, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.719873] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4b561555-0635-43ff-a44c-40165d697607 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "1a756eed-d5f2-4135-b522-ed06e20da1bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.174s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.874731] env[61629]: DEBUG oslo_vmware.api [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1353985, 'name': PowerOnVM_Task, 'duration_secs': 0.443116} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.875031] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 573.875227] env[61629]: DEBUG nova.compute.manager [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 573.876285] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40930586-adcb-47f6-8620-5da9e06dce1a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.987905] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 8d858fe9-1c97-457b-87ba-2d405bb7dcc0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 574.058040] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquiring lock "26366e41-de20-432b-a37e-5abb07c4ff8d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.058412] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Lock "26366e41-de20-432b-a37e-5abb07c4ff8d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.067991] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5297e057-068f-70e1-7ec9-c61a54670575, 'name': SearchDatastore_Task, 'duration_secs': 0.008433} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.069442] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc01aac6-d758-4163-a653-f4b0b58c02b1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.078782] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 574.078782] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52c40a4f-7bd9-681c-c887-9d1a73e814e7" [ 574.078782] env[61629]: _type = "Task" [ 574.078782] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.101326] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquiring lock "5670d64c-bddc-4b4a-bdf0-2b039be5e49e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.101665] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Lock "5670d64c-bddc-4b4a-bdf0-2b039be5e49e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.101916] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52c40a4f-7bd9-681c-c887-9d1a73e814e7, 'name': SearchDatastore_Task, 'duration_secs': 0.008374} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.102248] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.102490] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 33029a57-19d2-45eb-b4ec-f50c47d3dc12/33029a57-19d2-45eb-b4ec-f50c47d3dc12.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 574.102822] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-65718749-5431-4810-8880-10a47e52b160 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.110857] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 574.110857] env[61629]: value = "task-1353986" [ 574.110857] env[61629]: _type = "Task" [ 574.110857] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.121197] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353986, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.124185] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquiring lock "67534b42-bfab-49a0-922d-8a79a13995db" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.124478] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Lock "67534b42-bfab-49a0-922d-8a79a13995db" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.224122] env[61629]: DEBUG nova.compute.manager [None req-19581f8d-c03d-47c8-94e6-8dc9baae036d tempest-ServersListShow296Test-497296791 tempest-ServersListShow296Test-497296791-project-member] [instance: 643343ed-35c6-44e4-9852-55750f046fa1] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 574.399805] env[61629]: DEBUG oslo_concurrency.lockutils [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.496352] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance ce3a7a32-424a-48a4-b5c5-2a25190943f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 574.556566] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "cbcb5b42-06ab-41e4-ad08-d285b0863bfb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.559303] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "cbcb5b42-06ab-41e4-ad08-d285b0863bfb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.622271] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353986, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.730535] env[61629]: DEBUG nova.compute.manager [None req-19581f8d-c03d-47c8-94e6-8dc9baae036d tempest-ServersListShow296Test-497296791 tempest-ServersListShow296Test-497296791-project-member] [instance: 643343ed-35c6-44e4-9852-55750f046fa1] Instance disappeared before build. {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 574.928356] env[61629]: DEBUG oslo_concurrency.lockutils [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Acquiring lock "842633ee-19a5-44d6-bdef-c9f81e5af11e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.928581] env[61629]: DEBUG oslo_concurrency.lockutils [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Lock "842633ee-19a5-44d6-bdef-c9f81e5af11e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.002427] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 575.125022] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353986, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.654859} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.126100] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 33029a57-19d2-45eb-b4ec-f50c47d3dc12/33029a57-19d2-45eb-b4ec-f50c47d3dc12.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 575.126100] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 575.126100] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-42521130-cbc8-4222-b771-c49df3c8ae16 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.132630] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 575.132630] env[61629]: value = "task-1353987" [ 575.132630] env[61629]: _type = "Task" [ 575.132630] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.148067] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353987, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.250815] env[61629]: DEBUG oslo_concurrency.lockutils [None req-19581f8d-c03d-47c8-94e6-8dc9baae036d tempest-ServersListShow296Test-497296791 tempest-ServersListShow296Test-497296791-project-member] Lock "643343ed-35c6-44e4-9852-55750f046fa1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.970s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.503524] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance e40e1443-6d5d-41e1-9822-08b782e39d27 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 575.649773] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353987, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.20255} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.649773] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 575.650654] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8522b50-8139-4443-8c18-fed1e61ba72c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.677481] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] 33029a57-19d2-45eb-b4ec-f50c47d3dc12/33029a57-19d2-45eb-b4ec-f50c47d3dc12.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 575.677852] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-662d436c-546a-440a-8668-0620403e8fb0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.700456] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 575.700456] env[61629]: value = "task-1353988" [ 575.700456] env[61629]: _type = "Task" [ 575.700456] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.711256] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353988, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.757564] env[61629]: DEBUG nova.compute.manager [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Starting instance... 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 575.821996] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Acquiring lock "c332c6fd-1edd-4d9e-85a9-32a408f9d05e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.822194] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Lock "c332c6fd-1edd-4d9e-85a9-32a408f9d05e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.007303] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 01c864cd-58a3-4061-836d-6a86ad37e4c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 576.202146] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquiring lock "374062de-1242-44bd-b658-e8976f8c3b6c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.202593] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "374062de-1242-44bd-b658-e8976f8c3b6c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.214303] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353988, 'name': ReconfigVM_Task, 'duration_secs': 0.284638} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.214732] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Reconfigured VM instance instance-0000000a to attach disk [datastore2] 33029a57-19d2-45eb-b4ec-f50c47d3dc12/33029a57-19d2-45eb-b4ec-f50c47d3dc12.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 576.215801] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a046e33f-2f5f-47b1-92a5-23d5ecc4e570 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.224553] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 576.224553] env[61629]: value = "task-1353989" [ 576.224553] env[61629]: _type = "Task" [ 576.224553] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.231909] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353989, 'name': Rename_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.280474] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.515939] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 9b950dc9-d79c-4b30-8b71-1910b46ffd9b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 576.558393] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Acquiring lock "395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.558648] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Lock "395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.743708] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353989, 'name': Rename_Task, 'duration_secs': 0.150745} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.744052] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 576.744771] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7eea9a1c-e8b5-46dd-86c8-3413aa7efb89 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.754415] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 576.754415] env[61629]: value = "task-1353990" [ 576.754415] env[61629]: _type = "Task" [ 576.754415] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.767667] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353990, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.909323] env[61629]: INFO nova.compute.manager [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Rebuilding instance [ 576.971465] env[61629]: DEBUG nova.compute.manager [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 576.972432] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a426afc4-153e-4ba6-9c57-c113b130bdd7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.020802] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance b2ec37a4-09f6-428c-bca9-1ec121c9c390 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 577.265380] env[61629]: DEBUG oslo_vmware.api [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353990, 'name': PowerOnVM_Task, 'duration_secs': 0.437084} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.265837] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 577.266166] env[61629]: DEBUG nova.compute.manager [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 577.267164] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ca7a7e-092f-4311-a266-4d7fa834a2f0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.489082] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 577.489358] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-86a9c4db-be57-47ba-a5b8-47537408ca31 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.498653] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 
tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Waiting for the task: (returnval){ [ 577.498653] env[61629]: value = "task-1353991" [ 577.498653] env[61629]: _type = "Task" [ 577.498653] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.513019] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Task: {'id': task-1353991, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.524919] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance b0343f07-0539-4395-81c8-46ca1f2a8920 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 577.790115] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.010497] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Task: {'id': task-1353991, 'name': PowerOffVM_Task, 'duration_secs': 0.149433} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.011338] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 578.014023] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 578.014023] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bf92116-baa2-453c-b3bb-17acc103c615 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.024021] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 578.024021] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d40576fb-0021-4191-ac50-056cbe299d49 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.029671] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance b5625b76-37e3-49be-bd3b-8b864021dbd1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 578.054662] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 578.055446] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 578.055446] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Deleting the datastore file [datastore2] 733343f7-99e2-4e07-94eb-1b66458d799a {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 578.055446] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b21add3-09d2-4d3b-ac1b-2124075dc2c1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.061807] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Waiting for the task: (returnval){ [ 578.061807] env[61629]: value = "task-1353993" [ 578.061807] env[61629]: _type = "Task" [ 578.061807] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.071659] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Task: {'id': task-1353993, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.079497] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Acquiring lock "18b4e8c7-3517-46b2-b0a1-8d17bb222874" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.079963] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Lock "18b4e8c7-3517-46b2-b0a1-8d17bb222874" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.533894] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance d43d47a2-a27b-4bb8-9421-61805064a3d2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 578.573534] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Task: {'id': task-1353993, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102672} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.574060] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 578.574379] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 578.574693] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 579.039799] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 4839c06e-f55a-4162-8eae-cfaeae07cdae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 579.145678] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Acquiring lock "33029a57-19d2-45eb-b4ec-f50c47d3dc12" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.145932] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Lock "33029a57-19d2-45eb-b4ec-f50c47d3dc12" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.146149] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Acquiring lock "33029a57-19d2-45eb-b4ec-f50c47d3dc12-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.146326] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Lock "33029a57-19d2-45eb-b4ec-f50c47d3dc12-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.146509] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Lock "33029a57-19d2-45eb-b4ec-f50c47d3dc12-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.151073] env[61629]: INFO nova.compute.manager [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Terminating instance [ 579.154734] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Acquiring lock "refresh_cache-33029a57-19d2-45eb-b4ec-f50c47d3dc12" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.154734] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Acquired lock "refresh_cache-33029a57-19d2-45eb-b4ec-f50c47d3dc12" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.154734] env[61629]: DEBUG nova.network.neutron [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] 
[instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 579.181488] env[61629]: DEBUG oslo_concurrency.lockutils [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "54e03464-0f37-4f4d-8746-821e73da0541" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.182354] env[61629]: DEBUG oslo_concurrency.lockutils [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "54e03464-0f37-4f4d-8746-821e73da0541" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.545825] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance dd406dd1-0e19-400b-a862-ae51fd134017 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 579.617169] env[61629]: DEBUG nova.virt.hardware [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 579.617169] env[61629]: DEBUG nova.virt.hardware [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 579.617169] env[61629]: DEBUG nova.virt.hardware [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 579.617169] env[61629]: DEBUG nova.virt.hardware [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 579.617353] env[61629]: DEBUG nova.virt.hardware [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 579.617353] env[61629]: DEBUG nova.virt.hardware [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 579.617767] env[61629]: DEBUG nova.virt.hardware [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 579.617767] env[61629]: DEBUG nova.virt.hardware [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 579.617767] env[61629]: DEBUG nova.virt.hardware [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 579.617980] env[61629]: DEBUG nova.virt.hardware [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 579.618111] env[61629]: DEBUG nova.virt.hardware [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 579.619015] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f9646e-eb92-4ec4-a23e-6e05dc69e216 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.628228] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d51ebe16-0a52-41d5-90be-58c2a3f58b13 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.642969] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Instance VIF info [] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 579.648796] env[61629]: DEBUG oslo.service.loopingcall [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 579.649118] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 579.649333] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dbe7a660-3425-4365-b58e-30f31b9226aa {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.667912] env[61629]: DEBUG oslo_concurrency.lockutils [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquiring lock "71a5a130-fd26-4cf5-9b27-520f9eb62c55" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.668485] env[61629]: DEBUG oslo_concurrency.lockutils [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "71a5a130-fd26-4cf5-9b27-520f9eb62c55" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.671954] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 579.671954] env[61629]: value = "task-1353998" [ 579.671954] env[61629]: _type = "Task" [ 579.671954] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.681614] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1353998, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.685723] env[61629]: DEBUG nova.network.neutron [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 579.769157] env[61629]: DEBUG nova.network.neutron [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.049788] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 2315bd37-6151-42d7-8b54-9ee367be0ed1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 580.181161] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1353998, 'name': CreateVM_Task, 'duration_secs': 0.345007} completed successfully. 
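The get_cpu_topology_constraints entries a few records above (flavor and image limits 0:0:0, preferred 0:0:0, maximum 65536:65536:65536, exactly one possible topology 1:1:1 for a single vCPU) boil down to enumerating sockets/cores/threads factorizations of the vCPU count under the limits. A rough sketch of that enumeration, not Nova's actual implementation:

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate (sockets, cores, threads) triples whose product equals vcpus."""
        topologies = []
        for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
            if sockets * cores * threads != vcpus:
                continue
            if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
                topologies.append((sockets, cores, threads))
        return topologies

    # For the 1-vCPU m1.nano flavor in the log this yields a single topology:
    assert possible_topologies(1) == [(1, 1, 1)]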
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.181386] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 580.181843] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 580.182031] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 580.182523] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 580.182671] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43d03890-0f2c-46bd-ae91-aaa5588c0aab {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.187680] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Waiting for the task: (returnval){ [ 580.187680] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]523acb34-39ed-5b44-aafc-1711a7ef5ab4" [ 580.187680] env[61629]: _type = "Task" [ 580.187680] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.196195] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]523acb34-39ed-5b44-aafc-1711a7ef5ab4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.275020] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Releasing lock "refresh_cache-33029a57-19d2-45eb-b4ec-f50c47d3dc12" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 580.275020] env[61629]: DEBUG nova.compute.manager [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 580.275020] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 580.275020] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c540d030-98e5-4053-b6d1-d7830c70cc53 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.281766] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 580.282215] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48a3d98a-d447-42f8-b1d7-34d5c8b8d432 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.291023] env[61629]: DEBUG oslo_vmware.api [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 580.291023] env[61629]: value = "task-1353999" [ 580.291023] env[61629]: _type = "Task" [ 580.291023] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.297045] env[61629]: DEBUG oslo_vmware.api [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353999, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.553819] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance be2db738-cfe9-4720-b348-c7b03f28e96b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 580.702411] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]523acb34-39ed-5b44-aafc-1711a7ef5ab4, 'name': SearchDatastore_Task, 'duration_secs': 0.047617} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.703343] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 580.703343] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 580.705248] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 580.705248] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 580.705248] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 580.705248] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-35600b8f-68c1-4a99-b3e7-eab3a7a7a6a6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.715753] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 580.718676] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 580.718676] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdd1beb8-9a3b-45cc-bd4f-a0a7eaf91445 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.722448] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Waiting for the task: (returnval){ [ 580.722448] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52af16be-9807-f8e7-46aa-126fac13a591" [ 580.722448] env[61629]: _type = "Task" [ 580.722448] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.730408] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52af16be-9807-f8e7-46aa-126fac13a591, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.801943] env[61629]: DEBUG oslo_vmware.api [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1353999, 'name': PowerOffVM_Task, 'duration_secs': 0.192851} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.801943] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 580.801943] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 580.802717] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9cbcf2d5-1eb9-4e95-9c52-1d8766e4af37 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.810531] env[61629]: DEBUG oslo_concurrency.lockutils [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Acquiring lock "6dd1097f-7353-4938-be2b-51c248e45fe2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.810531] env[61629]: DEBUG oslo_concurrency.lockutils [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Lock "6dd1097f-7353-4938-be2b-51c248e45fe2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.825321] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 580.825534] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 580.825712] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Deleting the datastore file [datastore2] 33029a57-19d2-45eb-b4ec-f50c47d3dc12 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 580.825968] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e13d7384-522b-4ba3-af2c-61df048a23e2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.832846] env[61629]: DEBUG oslo_vmware.api [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for the task: (returnval){ [ 580.832846] env[61629]: value = "task-1354001" [ 580.832846] env[61629]: _type = "Task" [ 580.832846] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.841906] env[61629]: DEBUG oslo_vmware.api [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1354001, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.062772] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 6ba7ca7d-173d-41d3-b523-3548a67397c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 581.062772] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=61629) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 581.063089] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=61629) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 581.238288] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52af16be-9807-f8e7-46aa-126fac13a591, 'name': SearchDatastore_Task, 'duration_secs': 0.008621} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.244940] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95c4f32d-3f25-45bc-b3a1-403604ca85a5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.248905] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Waiting for the task: (returnval){ [ 581.248905] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]520c221d-54df-91fa-be7f-9f875179a4ca" [ 581.248905] env[61629]: _type = "Task" [ 581.248905] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.257300] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]520c221d-54df-91fa-be7f-9f875179a4ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.340997] env[61629]: DEBUG oslo_vmware.api [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Task: {'id': task-1354001, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107658} completed successfully. 
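The final resource view just reported (used_ram=896MB, used_disk=2GB, used_vcpus=2) is consistent with two claimed m1.nano instances (192 MB RAM, 1 GB root disk, 1 vCPU each) plus the 512 MB of reserved host memory shown in the inventory. A quick check of that arithmetic:

    # Two m1.nano guests currently claimed on the node, per the log.
    instances = 2
    flavor = {'memory_mb': 192, 'root_gb': 1, 'vcpus': 1}
    reserved_host_memory_mb = 512   # 'reserved' in the MEMORY_MB inventory below

    used_ram = reserved_host_memory_mb + instances * flavor['memory_mb']
    used_disk = instances * flavor['root_gb']
    used_vcpus = instances * flavor['vcpus']

    assert (used_ram, used_disk, used_vcpus) == (896, 2, 2)   # matches the logged view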
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.341299] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 581.342427] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 581.342427] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 581.342427] env[61629]: INFO nova.compute.manager [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Took 1.07 seconds to destroy the instance on the hypervisor. [ 581.342427] env[61629]: DEBUG oslo.service.loopingcall [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 581.342427] env[61629]: DEBUG nova.compute.manager [-] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 581.342427] env[61629]: DEBUG nova.network.neutron [-] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 581.364033] env[61629]: DEBUG nova.network.neutron [-] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 581.570193] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec572dab-6f75-4120-a759-e2e3786488b3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.579468] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1880763b-7b9c-4f13-822b-ed4454bdef6c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.623678] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e70c53d2-6fd8-4db1-925b-6e6cd55109a2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.635991] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c0f2f75-cc03-4132-a7dd-4e84f0ba19eb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.653047] env[61629]: DEBUG nova.compute.provider_tree [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 581.763832] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]520c221d-54df-91fa-be7f-9f875179a4ca, 'name': SearchDatastore_Task, 'duration_secs': 0.022465} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.764159] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 581.764459] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 733343f7-99e2-4e07-94eb-1b66458d799a/733343f7-99e2-4e07-94eb-1b66458d799a.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 581.765147] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4b87c02d-9237-48dc-ab6a-5d75fb49b936 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.773021] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Waiting for the task: (returnval){ [ 581.773021] env[61629]: value = "task-1354003" [ 581.773021] env[61629]: _type = "Task" [ 581.773021] env[61629]: } to complete. 
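The placement inventory reported above carries an allocation_ratio of 4.0 for VCPU and 1.0 for memory and disk, so the capacity the scheduler can allocate against differs from the physical totals. A small sketch of those effective capacities, assuming the usual (total - reserved) * allocation_ratio formula used by placement:

    def effective_capacity(total, reserved, allocation_ratio):
        """Placement-style usable capacity: (total - reserved) * allocation_ratio."""
        return (total - reserved) * allocation_ratio

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        print(rc, effective_capacity(inv['total'], inv['reserved'], inv['allocation_ratio']))
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0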
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.783050] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Task: {'id': task-1354003, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.863571] env[61629]: DEBUG nova.network.neutron [-] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 582.158414] env[61629]: DEBUG nova.scheduler.client.report [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 582.286024] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Task: {'id': task-1354003, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.492605} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.286553] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 733343f7-99e2-4e07-94eb-1b66458d799a/733343f7-99e2-4e07-94eb-1b66458d799a.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 582.287093] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 582.288469] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f0b65c38-589b-446a-9abb-44f73fea1dfc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.294481] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Waiting for the task: (returnval){ [ 582.294481] env[61629]: value = "task-1354004" [ 582.294481] env[61629]: _type = "Task" [ 582.294481] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.304698] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Task: {'id': task-1354004, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.366326] env[61629]: INFO nova.compute.manager [-] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Took 1.02 seconds to deallocate network for instance. [ 582.662954] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61629) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 582.663278] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 11.259s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 582.663617] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.774s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.666654] env[61629]: INFO nova.compute.claims [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 582.806465] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Task: {'id': task-1354004, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06448} completed successfully. 
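The lockutils entries just above ("compute_resources" held 11.259s by the resource tracker, then acquired by the next instance claim after waiting 24.774s) show how all resource-tracker updates and claims on this host serialize behind one named lock, with wait and hold times logged. A toy illustration of that wait/held bookkeeping using a plain threading lock; it mimics the log format but is not oslo.concurrency's implementation:

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}   # name -> Lock; a real implementation would guard this dict too

    @contextmanager
    def timed_lock(name):
        """Acquire a named lock, reporting waited/held times like the log does."""
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
        try:
            yield
        finally:
            held = time.monotonic() - t0 - waited
            lock.release()
            print(f'Lock "{name}" "released" :: held {held:.3f}s')

    # e.g. with timed_lock("compute_resources"): ... claim or update resources ...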
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.806742] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 582.807543] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59092695-76b8-4a9c-8bde-97c02ddf4afe {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.838509] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] 733343f7-99e2-4e07-94eb-1b66458d799a/733343f7-99e2-4e07-94eb-1b66458d799a.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 582.838831] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb4a1da2-2371-45e8-8daa-ae43ba39ab74 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.865873] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Waiting for the task: (returnval){ [ 582.865873] env[61629]: value = "task-1354005" [ 582.865873] env[61629]: _type = "Task" [ 582.865873] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.876435] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Task: {'id': task-1354005, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.881966] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.377513] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Task: {'id': task-1354005, 'name': ReconfigVM_Task, 'duration_secs': 0.410115} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.378125] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Reconfigured VM instance instance-0000000b to attach disk [datastore2] 733343f7-99e2-4e07-94eb-1b66458d799a/733343f7-99e2-4e07-94eb-1b66458d799a.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 583.378463] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b62f2a76-2a4a-44d0-ab63-a01bf3e275f9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.384670] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Waiting for the task: (returnval){ [ 583.384670] env[61629]: value = "task-1354007" [ 583.384670] env[61629]: _type = "Task" [ 583.384670] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.393024] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Task: {'id': task-1354007, 'name': Rename_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.897459] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Task: {'id': task-1354007, 'name': Rename_Task, 'duration_secs': 0.121976} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.897459] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 583.898262] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c014559d-3299-47df-b1a3-17ad99f49430 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.911351] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Waiting for the task: (returnval){ [ 583.911351] env[61629]: value = "task-1354008" [ 583.911351] env[61629]: _type = "Task" [ 583.911351] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.919861] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Task: {'id': task-1354008, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.223493] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2d9b04-3d48-41f3-bbbd-619e0814bf8e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.231332] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f362af67-c964-461f-b8a6-4e49c068f1c1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.266864] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3799f67-24ac-48fb-ae74-6cf640345f8b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.275550] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cabe321-65a7-44d6-b391-aa50b642730b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.290747] env[61629]: DEBUG nova.compute.provider_tree [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 584.420474] env[61629]: DEBUG oslo_vmware.api [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Task: {'id': task-1354008, 'name': PowerOnVM_Task, 'duration_secs': 0.466383} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.420960] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 584.421282] env[61629]: DEBUG nova.compute.manager [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 584.422267] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85336e18-b711-479c-86a5-1047964a6789 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.798512] env[61629]: DEBUG nova.scheduler.client.report [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 584.942615] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.305020] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.638s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.305020] env[61629]: DEBUG nova.compute.manager [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 585.307243] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.943s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.309084] env[61629]: INFO nova.compute.claims [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 585.602682] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Acquiring lock "733343f7-99e2-4e07-94eb-1b66458d799a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.602909] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Lock "733343f7-99e2-4e07-94eb-1b66458d799a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.603759] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Acquiring lock "733343f7-99e2-4e07-94eb-1b66458d799a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.603759] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Lock "733343f7-99e2-4e07-94eb-1b66458d799a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.603759] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Lock "733343f7-99e2-4e07-94eb-1b66458d799a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.609933] env[61629]: INFO nova.compute.manager [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Terminating instance [ 585.613727] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 
tempest-ServersAdmin275Test-881078219-project-member] Acquiring lock "refresh_cache-733343f7-99e2-4e07-94eb-1b66458d799a" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.614224] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Acquired lock "refresh_cache-733343f7-99e2-4e07-94eb-1b66458d799a" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.614443] env[61629]: DEBUG nova.network.neutron [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 585.814126] env[61629]: DEBUG nova.compute.utils [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 585.817437] env[61629]: DEBUG nova.compute.manager [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 585.817647] env[61629]: DEBUG nova.network.neutron [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 585.937071] env[61629]: DEBUG nova.policy [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38cc8b6343d54d30a3f6f13512d23020', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e7fced3a50d4821b42cf087d8111cb7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 586.141823] env[61629]: DEBUG nova.network.neutron [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 586.278588] env[61629]: DEBUG nova.network.neutron [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.325705] env[61629]: DEBUG nova.compute.manager [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 586.777745] env[61629]: DEBUG nova.network.neutron [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Successfully created port: 02865c61-ae10-4f73-b3a1-5027b2e9f76f {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 586.782536] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Releasing lock "refresh_cache-733343f7-99e2-4e07-94eb-1b66458d799a" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.783372] env[61629]: DEBUG nova.compute.manager [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 586.783474] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 586.784948] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e4d152-f48f-442a-b53e-84cae8df3a23 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.797589] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 586.797589] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dada9395-6d2a-4961-a6d2-ca5c5b15a052 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.803995] env[61629]: DEBUG oslo_vmware.api [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for the task: (returnval){ [ 586.803995] env[61629]: value = "task-1354010" [ 586.803995] env[61629]: _type = "Task" [ 586.803995] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.809853] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38436b29-7fd1-44c5-8b15-167131eac4b9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.818126] env[61629]: DEBUG oslo_vmware.api [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1354010, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.821257] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd12dc6-f90b-4465-9405-b61bcede721e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.858624] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ace37a-92dc-49ea-baa8-6046eb81f0f3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.869809] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e6b5b9-fded-4a83-be33-933644ea4e45 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.884102] env[61629]: DEBUG nova.compute.provider_tree [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 587.318094] env[61629]: DEBUG oslo_vmware.api [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1354010, 'name': PowerOffVM_Task, 'duration_secs': 0.180438} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.318959] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 587.319854] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 587.322017] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-39f6ae21-a74d-4401-8d09-91c9523316ef {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.342768] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 587.343058] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 587.343709] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 
tempest-ServersAdmin275Test-881078219-project-member] Deleting the datastore file [datastore2] 733343f7-99e2-4e07-94eb-1b66458d799a {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 587.343709] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d17eb50-5e56-42fc-9116-89714ed628ac {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.350011] env[61629]: DEBUG oslo_vmware.api [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for the task: (returnval){ [ 587.350011] env[61629]: value = "task-1354012" [ 587.350011] env[61629]: _type = "Task" [ 587.350011] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.365297] env[61629]: DEBUG nova.compute.manager [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 587.387599] env[61629]: DEBUG nova.scheduler.client.report [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 587.407203] env[61629]: DEBUG nova.virt.hardware [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 587.408291] env[61629]: DEBUG nova.virt.hardware [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 587.408539] env[61629]: DEBUG nova.virt.hardware [None 
req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 587.408795] env[61629]: DEBUG nova.virt.hardware [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 587.409323] env[61629]: DEBUG nova.virt.hardware [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 587.409563] env[61629]: DEBUG nova.virt.hardware [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 587.409838] env[61629]: DEBUG nova.virt.hardware [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 587.410276] env[61629]: DEBUG nova.virt.hardware [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 587.411628] env[61629]: DEBUG nova.virt.hardware [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 587.411900] env[61629]: DEBUG nova.virt.hardware [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 587.412680] env[61629]: DEBUG nova.virt.hardware [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 587.413127] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94eac920-7b5b-47b7-8906-b741954325e9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.425211] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6be7d424-db43-46bf-8402-9342f67e677b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
587.864249] env[61629]: DEBUG oslo_vmware.api [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Task: {'id': task-1354012, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107433} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.864886] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 587.865329] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 587.865396] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 587.865589] env[61629]: INFO nova.compute.manager [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Took 1.08 seconds to destroy the instance on the hypervisor. [ 587.865937] env[61629]: DEBUG oslo.service.loopingcall [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 587.866224] env[61629]: DEBUG nova.compute.manager [-] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 587.866354] env[61629]: DEBUG nova.network.neutron [-] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 587.904679] env[61629]: DEBUG nova.network.neutron [-] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 587.906456] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.599s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 587.906916] env[61629]: DEBUG nova.compute.manager [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 587.910705] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.579s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.914495] env[61629]: INFO nova.compute.claims [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 588.051160] env[61629]: DEBUG nova.compute.manager [req-7c8bf81d-2ed9-4201-888c-f1a1374de409 req-08cf85c1-8b7f-4000-beb0-85350f6623ce service nova] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Received event network-changed-02865c61-ae10-4f73-b3a1-5027b2e9f76f {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 588.051403] env[61629]: DEBUG nova.compute.manager [req-7c8bf81d-2ed9-4201-888c-f1a1374de409 req-08cf85c1-8b7f-4000-beb0-85350f6623ce service nova] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Refreshing instance network info cache due to event network-changed-02865c61-ae10-4f73-b3a1-5027b2e9f76f. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 588.051631] env[61629]: DEBUG oslo_concurrency.lockutils [req-7c8bf81d-2ed9-4201-888c-f1a1374de409 req-08cf85c1-8b7f-4000-beb0-85350f6623ce service nova] Acquiring lock "refresh_cache-62d7c997-cd38-43f5-a571-78a055ad05f7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.051731] env[61629]: DEBUG oslo_concurrency.lockutils [req-7c8bf81d-2ed9-4201-888c-f1a1374de409 req-08cf85c1-8b7f-4000-beb0-85350f6623ce service nova] Acquired lock "refresh_cache-62d7c997-cd38-43f5-a571-78a055ad05f7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.051882] env[61629]: DEBUG nova.network.neutron [req-7c8bf81d-2ed9-4201-888c-f1a1374de409 req-08cf85c1-8b7f-4000-beb0-85350f6623ce service nova] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Refreshing network info cache for port 02865c61-ae10-4f73-b3a1-5027b2e9f76f {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 588.234260] env[61629]: ERROR nova.compute.manager [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 02865c61-ae10-4f73-b3a1-5027b2e9f76f, please check neutron logs for more information. [ 588.234260] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 588.234260] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 588.234260] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 588.234260] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 588.234260] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 588.234260] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 588.234260] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 588.234260] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 588.234260] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 588.234260] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 588.234260] env[61629]: ERROR nova.compute.manager raise self.value [ 588.234260] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 588.234260] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 588.234260] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 588.234260] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 588.234761] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 588.234761] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 588.234761] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port 02865c61-ae10-4f73-b3a1-5027b2e9f76f, please check neutron logs for more information. [ 588.234761] env[61629]: ERROR nova.compute.manager [ 588.234761] env[61629]: Traceback (most recent call last): [ 588.234761] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 588.234761] env[61629]: listener.cb(fileno) [ 588.234761] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 588.234761] env[61629]: result = function(*args, **kwargs) [ 588.234761] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 588.234761] env[61629]: return func(*args, **kwargs) [ 588.234761] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 588.234761] env[61629]: raise e [ 588.234761] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 588.234761] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 588.234761] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 588.234761] env[61629]: created_port_ids = self._update_ports_for_instance( [ 588.234761] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 588.234761] env[61629]: with excutils.save_and_reraise_exception(): [ 588.234761] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 588.234761] env[61629]: self.force_reraise() [ 588.234761] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 588.234761] env[61629]: raise self.value [ 588.234761] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 588.234761] env[61629]: updated_port = self._update_port( [ 588.234761] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 588.234761] env[61629]: _ensure_no_port_binding_failure(port) [ 588.234761] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 588.234761] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 588.236018] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 02865c61-ae10-4f73-b3a1-5027b2e9f76f, please check neutron logs for more information. [ 588.236018] env[61629]: Removing descriptor: 21 [ 588.236018] env[61629]: ERROR nova.compute.manager [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 02865c61-ae10-4f73-b3a1-5027b2e9f76f, please check neutron logs for more information. 
[ 588.236018] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Traceback (most recent call last): [ 588.236018] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 588.236018] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] yield resources [ 588.236018] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 588.236018] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] self.driver.spawn(context, instance, image_meta, [ 588.236018] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 588.236018] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 588.236018] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 588.236018] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] vm_ref = self.build_virtual_machine(instance, [ 588.236434] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 588.236434] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] vif_infos = vmwarevif.get_vif_info(self._session, [ 588.236434] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 588.236434] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] for vif in network_info: [ 588.236434] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 588.236434] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] return self._sync_wrapper(fn, *args, **kwargs) [ 588.236434] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 588.236434] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] self.wait() [ 588.236434] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 588.236434] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] self[:] = self._gt.wait() [ 588.236434] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 588.236434] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] return self._exit_event.wait() [ 588.236434] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 588.236825] env[61629]: ERROR 
nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] result = hub.switch() [ 588.236825] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 588.236825] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] return self.greenlet.switch() [ 588.236825] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 588.236825] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] result = function(*args, **kwargs) [ 588.236825] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 588.236825] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] return func(*args, **kwargs) [ 588.236825] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 588.236825] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] raise e [ 588.236825] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 588.236825] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] nwinfo = self.network_api.allocate_for_instance( [ 588.236825] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 588.236825] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] created_port_ids = self._update_ports_for_instance( [ 588.237303] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 588.237303] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] with excutils.save_and_reraise_exception(): [ 588.237303] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 588.237303] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] self.force_reraise() [ 588.237303] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 588.237303] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] raise self.value [ 588.237303] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 588.237303] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] updated_port = self._update_port( [ 588.237303] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 588.237303] 
env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] _ensure_no_port_binding_failure(port) [ 588.237303] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 588.237303] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] raise exception.PortBindingFailed(port_id=port['id']) [ 588.237658] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] nova.exception.PortBindingFailed: Binding failed for port 02865c61-ae10-4f73-b3a1-5027b2e9f76f, please check neutron logs for more information. [ 588.237658] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] [ 588.237658] env[61629]: INFO nova.compute.manager [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Terminating instance [ 588.239405] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "refresh_cache-62d7c997-cd38-43f5-a571-78a055ad05f7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.411746] env[61629]: DEBUG nova.network.neutron [-] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.421798] env[61629]: DEBUG nova.compute.utils [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 588.426445] env[61629]: DEBUG nova.compute.manager [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 588.426624] env[61629]: DEBUG nova.network.neutron [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 588.473126] env[61629]: DEBUG nova.policy [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5288b43ae51d49e2bb494b5fd147ee43', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30834094374949ba8c8b3000a988074c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 588.599267] env[61629]: DEBUG nova.network.neutron [req-7c8bf81d-2ed9-4201-888c-f1a1374de409 req-08cf85c1-8b7f-4000-beb0-85350f6623ce service nova] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 588.830318] env[61629]: DEBUG nova.network.neutron [req-7c8bf81d-2ed9-4201-888c-f1a1374de409 req-08cf85c1-8b7f-4000-beb0-85350f6623ce service nova] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.875991] env[61629]: DEBUG nova.network.neutron [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Successfully created port: 255c484c-cf0c-421e-a590-e8b70f204eee {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 588.915837] env[61629]: INFO nova.compute.manager [-] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Took 1.05 seconds to deallocate network for instance. [ 588.927340] env[61629]: DEBUG nova.compute.manager [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 589.337276] env[61629]: DEBUG oslo_concurrency.lockutils [req-7c8bf81d-2ed9-4201-888c-f1a1374de409 req-08cf85c1-8b7f-4000-beb0-85350f6623ce service nova] Releasing lock "refresh_cache-62d7c997-cd38-43f5-a571-78a055ad05f7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 589.338251] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquired lock "refresh_cache-62d7c997-cd38-43f5-a571-78a055ad05f7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.338251] env[61629]: DEBUG nova.network.neutron [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 589.424164] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.424164] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3489d82-ed35-48d6-b067-e551a384114f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.433174] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba5e4a5f-f6c8-4080-a202-6bfd459df8fe {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.475938] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9631fe70-96c3-494d-a54a-e7527bf75c9f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.485286] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afee1403-7aa8-4953-9684-dbc85933966e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.505073] env[61629]: DEBUG nova.compute.provider_tree [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 589.876571] env[61629]: DEBUG nova.network.neutron [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 589.945547] env[61629]: DEBUG nova.compute.manager [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 589.974252] env[61629]: DEBUG nova.virt.hardware [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 589.974422] env[61629]: DEBUG nova.virt.hardware [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 589.974465] env[61629]: DEBUG nova.virt.hardware [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 589.974635] env[61629]: DEBUG nova.virt.hardware [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 589.974777] env[61629]: DEBUG nova.virt.hardware [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 589.974921] env[61629]: DEBUG nova.virt.hardware [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 589.975140] env[61629]: DEBUG nova.virt.hardware [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 589.975309] env[61629]: DEBUG nova.virt.hardware [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 589.975465] env[61629]: DEBUG nova.virt.hardware [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 589.975657] env[61629]: DEBUG nova.virt.hardware [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 589.976477] env[61629]: DEBUG nova.virt.hardware [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 589.977376] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d548d22-7017-428e-9468-f3ba3438f6d6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.987997] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d91aedd9-ef8f-4078-aec3-5df7bb1d97ce {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.007967] env[61629]: DEBUG nova.scheduler.client.report [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 590.110520] env[61629]: DEBUG nova.network.neutron [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.146054] env[61629]: DEBUG nova.compute.manager [req-0b9e8226-3e26-4dc0-ac2e-63a9a487d914 req-3749c5d4-f65e-46ae-96a3-64b52a89008f service nova] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Received event network-vif-deleted-02865c61-ae10-4f73-b3a1-5027b2e9f76f {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 590.513402] env[61629]: 
DEBUG oslo_concurrency.lockutils [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.603s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.513924] env[61629]: DEBUG nova.compute.manager [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 590.517145] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.141s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.519109] env[61629]: INFO nova.compute.claims [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 590.566138] env[61629]: ERROR nova.compute.manager [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 255c484c-cf0c-421e-a590-e8b70f204eee, please check neutron logs for more information. 
[ 590.566138] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 590.566138] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 590.566138] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 590.566138] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 590.566138] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 590.566138] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 590.566138] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 590.566138] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 590.566138] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 590.566138] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 590.566138] env[61629]: ERROR nova.compute.manager raise self.value [ 590.566138] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 590.566138] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 590.566138] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 590.566138] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 590.566587] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 590.566587] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 590.566587] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 255c484c-cf0c-421e-a590-e8b70f204eee, please check neutron logs for more information. 
[ 590.566587] env[61629]: ERROR nova.compute.manager [ 590.566587] env[61629]: Traceback (most recent call last): [ 590.566587] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 590.566587] env[61629]: listener.cb(fileno) [ 590.566587] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 590.566587] env[61629]: result = function(*args, **kwargs) [ 590.566587] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 590.566587] env[61629]: return func(*args, **kwargs) [ 590.566587] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 590.566587] env[61629]: raise e [ 590.566587] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 590.566587] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 590.566587] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 590.566587] env[61629]: created_port_ids = self._update_ports_for_instance( [ 590.566587] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 590.566587] env[61629]: with excutils.save_and_reraise_exception(): [ 590.566587] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 590.566587] env[61629]: self.force_reraise() [ 590.566587] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 590.566587] env[61629]: raise self.value [ 590.566587] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 590.566587] env[61629]: updated_port = self._update_port( [ 590.566587] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 590.566587] env[61629]: _ensure_no_port_binding_failure(port) [ 590.566587] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 590.566587] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 590.567796] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 255c484c-cf0c-421e-a590-e8b70f204eee, please check neutron logs for more information. [ 590.567796] env[61629]: Removing descriptor: 21 [ 590.567796] env[61629]: ERROR nova.compute.manager [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 255c484c-cf0c-421e-a590-e8b70f204eee, please check neutron logs for more information. 
[ 590.567796] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Traceback (most recent call last): [ 590.567796] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 590.567796] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] yield resources [ 590.567796] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 590.567796] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] self.driver.spawn(context, instance, image_meta, [ 590.567796] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 590.567796] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 590.567796] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 590.567796] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] vm_ref = self.build_virtual_machine(instance, [ 590.568207] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 590.568207] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] vif_infos = vmwarevif.get_vif_info(self._session, [ 590.568207] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 590.568207] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] for vif in network_info: [ 590.568207] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 590.568207] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] return self._sync_wrapper(fn, *args, **kwargs) [ 590.568207] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 590.568207] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] self.wait() [ 590.568207] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 590.568207] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] self[:] = self._gt.wait() [ 590.568207] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 590.568207] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] return self._exit_event.wait() [ 590.568207] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 590.568634] env[61629]: ERROR 
nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] result = hub.switch() [ 590.568634] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 590.568634] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] return self.greenlet.switch() [ 590.568634] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 590.568634] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] result = function(*args, **kwargs) [ 590.568634] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 590.568634] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] return func(*args, **kwargs) [ 590.568634] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 590.568634] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] raise e [ 590.568634] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 590.568634] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] nwinfo = self.network_api.allocate_for_instance( [ 590.568634] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 590.568634] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] created_port_ids = self._update_ports_for_instance( [ 590.569031] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 590.569031] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] with excutils.save_and_reraise_exception(): [ 590.569031] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 590.569031] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] self.force_reraise() [ 590.569031] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 590.569031] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] raise self.value [ 590.569031] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 590.569031] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] updated_port = self._update_port( [ 590.569031] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 590.569031] 
env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] _ensure_no_port_binding_failure(port) [ 590.569031] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 590.569031] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] raise exception.PortBindingFailed(port_id=port['id']) [ 590.569436] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] nova.exception.PortBindingFailed: Binding failed for port 255c484c-cf0c-421e-a590-e8b70f204eee, please check neutron logs for more information. [ 590.569436] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] [ 590.569436] env[61629]: INFO nova.compute.manager [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Terminating instance [ 590.573336] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Acquiring lock "refresh_cache-8d858fe9-1c97-457b-87ba-2d405bb7dcc0" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.573555] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Acquired lock "refresh_cache-8d858fe9-1c97-457b-87ba-2d405bb7dcc0" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.573738] env[61629]: DEBUG nova.network.neutron [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 590.613091] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Releasing lock "refresh_cache-62d7c997-cd38-43f5-a571-78a055ad05f7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.613542] env[61629]: DEBUG nova.compute.manager [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 590.613734] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 590.614399] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ac8c5232-af59-4934-a22b-2467e8d50647 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.626586] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db611a34-3211-432e-a3be-e370be765d50 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.657248] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 62d7c997-cd38-43f5-a571-78a055ad05f7 could not be found. [ 590.657494] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 590.657666] env[61629]: INFO nova.compute.manager [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 590.658614] env[61629]: DEBUG oslo.service.loopingcall [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 590.658614] env[61629]: DEBUG nova.compute.manager [-] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 590.658614] env[61629]: DEBUG nova.network.neutron [-] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 590.686115] env[61629]: DEBUG nova.network.neutron [-] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 591.027361] env[61629]: DEBUG nova.compute.utils [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 591.031047] env[61629]: DEBUG nova.compute.manager [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 591.031248] env[61629]: DEBUG nova.network.neutron [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 591.092762] env[61629]: DEBUG nova.policy [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e43fa2b15f474cbfa08199e0e38b444b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a584f8510324004b9bb8823ce27eb3d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 591.105324] env[61629]: DEBUG nova.network.neutron [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 591.190205] env[61629]: DEBUG nova.network.neutron [-] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 591.469978] env[61629]: DEBUG nova.network.neutron [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 591.533375] env[61629]: DEBUG nova.compute.manager [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 591.601019] env[61629]: DEBUG nova.network.neutron [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Successfully created port: 2288edb5-249b-4290-b09b-0a4321f47a5a {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 591.698642] env[61629]: INFO nova.compute.manager [-] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Took 1.04 seconds to deallocate network for instance. [ 591.705276] env[61629]: DEBUG nova.compute.claims [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 591.705567] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.972668] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Releasing lock "refresh_cache-8d858fe9-1c97-457b-87ba-2d405bb7dcc0" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.972668] env[61629]: DEBUG nova.compute.manager [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 591.972668] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 591.972668] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c9547d4-aef6-4ff2-972f-51623a47b0db {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.987151] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d32613-80cb-47c9-9bc9-a970e8986d57 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.012204] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8d858fe9-1c97-457b-87ba-2d405bb7dcc0 could not be found. 
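Editor's note: both PortBindingFailed tracebacks above bottom out in _ensure_no_port_binding_failure (nova/network/neutron.py:294). As a reading aid, the following is a minimal Python sketch of that kind of check, not Nova's verbatim source; the binding:vif_type field and the 'binding_failed' sentinel are assumptions drawn from the Neutron port-binding API, while the exception message is taken from the log records themselves.

    # Simplified sketch, not Nova's verbatim code: reject ports whose binding
    # failed on the Neutron side, producing the message seen in the log above.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed sentinel value

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs for '
                'more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        # The traceback shows this helper raising PortBindingFailed with the
        # offending port id; the concrete condition below is an assumption.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

When the check fires, _allocate_network_async re-raises the exception, which is why the same port id appears first in the network-setup error and then in the "Instance failed to spawn" traceback for instance 8d858fe9-1c97-457b-87ba-2d405bb7dcc0.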
[ 592.012561] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 592.012831] env[61629]: INFO nova.compute.manager [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Took 0.04 seconds to destroy the instance on the hypervisor. [ 592.013175] env[61629]: DEBUG oslo.service.loopingcall [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 592.013466] env[61629]: DEBUG nova.compute.manager [-] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 592.013608] env[61629]: DEBUG nova.network.neutron [-] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 592.028921] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f5b695-3a91-4376-9287-465ba6763022 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.040039] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f4bef4c-edcc-487f-9ec5-3d43c5fd3946 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.044046] env[61629]: DEBUG nova.network.neutron [-] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 592.083017] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14539813-200d-44c7-82cd-8532fe33b71b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.090390] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da421996-7a51-41e9-88b8-d2e19294fd27 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.106974] env[61629]: DEBUG nova.compute.provider_tree [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 592.548351] env[61629]: DEBUG nova.network.neutron [-] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.551434] env[61629]: DEBUG nova.compute.manager [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 592.581016] env[61629]: DEBUG nova.virt.hardware [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 592.581016] env[61629]: DEBUG nova.virt.hardware [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 592.581016] env[61629]: DEBUG nova.virt.hardware [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 592.581299] env[61629]: DEBUG nova.virt.hardware [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 
tempest-ImagesOneServerTestJSON-612300628-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 592.581299] env[61629]: DEBUG nova.virt.hardware [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 592.581299] env[61629]: DEBUG nova.virt.hardware [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 592.581299] env[61629]: DEBUG nova.virt.hardware [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 592.581299] env[61629]: DEBUG nova.virt.hardware [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 592.581478] env[61629]: DEBUG nova.virt.hardware [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 592.581478] env[61629]: DEBUG nova.virt.hardware [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 592.581478] env[61629]: DEBUG nova.virt.hardware [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 592.582516] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b77233-98f2-4dff-a545-8c5e2e27de2e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.592219] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639967e4-f211-47cb-8b6b-7718842d5220 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.610792] env[61629]: DEBUG nova.scheduler.client.report [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 592.617244] env[61629]: DEBUG nova.compute.manager [req-533eaebe-b63d-48d5-8a5f-bb57fc81d5af req-ca0cb6ca-6db6-4805-a5bc-85f49c8c920b service nova] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Received event network-changed-255c484c-cf0c-421e-a590-e8b70f204eee {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 592.617484] env[61629]: DEBUG nova.compute.manager [req-533eaebe-b63d-48d5-8a5f-bb57fc81d5af req-ca0cb6ca-6db6-4805-a5bc-85f49c8c920b service nova] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Refreshing instance network info cache due to event network-changed-255c484c-cf0c-421e-a590-e8b70f204eee. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 592.617952] env[61629]: DEBUG oslo_concurrency.lockutils [req-533eaebe-b63d-48d5-8a5f-bb57fc81d5af req-ca0cb6ca-6db6-4805-a5bc-85f49c8c920b service nova] Acquiring lock "refresh_cache-8d858fe9-1c97-457b-87ba-2d405bb7dcc0" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.617952] env[61629]: DEBUG oslo_concurrency.lockutils [req-533eaebe-b63d-48d5-8a5f-bb57fc81d5af req-ca0cb6ca-6db6-4805-a5bc-85f49c8c920b service nova] Acquired lock "refresh_cache-8d858fe9-1c97-457b-87ba-2d405bb7dcc0" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.618108] env[61629]: DEBUG nova.network.neutron [req-533eaebe-b63d-48d5-8a5f-bb57fc81d5af req-ca0cb6ca-6db6-4805-a5bc-85f49c8c920b service nova] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Refreshing network info cache for port 255c484c-cf0c-421e-a590-e8b70f204eee {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 593.055275] env[61629]: INFO nova.compute.manager [-] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Took 1.04 seconds to deallocate network for instance. 
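Editor's note: the nova.virt.hardware records a few lines above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") show the topology search for the 1-vCPU m1.nano flavor with no flavor or image limits. Below is a rough, self-contained illustration of why that search yields exactly one candidate; it is not Nova's implementation, just an enumeration of (sockets, cores, threads) factorizations under the effectively unbounded maxima reported in the log.

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate (sockets, cores, threads) triples whose product equals
        # the vCPU count and which respect the per-dimension maxima.
        found = []
        for sockets, cores, threads in itertools.product(
                range(1, vcpus + 1), repeat=3):
            if (sockets * cores * threads == vcpus
                    and sockets <= max_sockets
                    and cores <= max_cores
                    and threads <= max_threads):
                found.append((sockets, cores, threads))
        return found

    print(possible_topologies(1))  # -> [(1, 1, 1)]

For vcpus=1 the only factorization is 1 x 1 x 1, which is why the log reports a single possible topology and the same topology as the sorted desired result.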
[ 593.060806] env[61629]: DEBUG nova.compute.claims [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 593.060806] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.123269] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.604s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 593.123269] env[61629]: DEBUG nova.compute.manager [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 593.135026] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.797s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.135026] env[61629]: INFO nova.compute.claims [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 593.177143] env[61629]: DEBUG nova.network.neutron [req-533eaebe-b63d-48d5-8a5f-bb57fc81d5af req-ca0cb6ca-6db6-4805-a5bc-85f49c8c920b service nova] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 593.286896] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Acquiring lock "39f7c5ee-7d07-4516-b008-40d5778cf139" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.287229] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Lock "39f7c5ee-7d07-4516-b008-40d5778cf139" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.375146] env[61629]: DEBUG nova.network.neutron [req-533eaebe-b63d-48d5-8a5f-bb57fc81d5af req-ca0cb6ca-6db6-4805-a5bc-85f49c8c920b service nova] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 593.641232] env[61629]: DEBUG nova.compute.utils [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 593.643605] env[61629]: DEBUG nova.compute.manager [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 593.643940] env[61629]: DEBUG nova.network.neutron [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 593.749388] env[61629]: DEBUG nova.policy [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3eb918034bc84355b33da9dc20fc013a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ec60956067f8451eaae2065deb26771f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 593.819627] env[61629]: ERROR nova.compute.manager [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2288edb5-249b-4290-b09b-0a4321f47a5a, please check neutron logs for more information. [ 593.819627] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 593.819627] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 593.819627] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 593.819627] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 593.819627] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 593.819627] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 593.819627] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 593.819627] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 593.819627] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 593.819627] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 593.819627] env[61629]: ERROR nova.compute.manager raise self.value [ 593.819627] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 593.819627] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 593.819627] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 593.819627] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 593.820320] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 593.820320] env[61629]: ERROR nova.compute.manager raise 
exception.PortBindingFailed(port_id=port['id']) [ 593.820320] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2288edb5-249b-4290-b09b-0a4321f47a5a, please check neutron logs for more information. [ 593.820320] env[61629]: ERROR nova.compute.manager [ 593.820320] env[61629]: Traceback (most recent call last): [ 593.820320] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 593.820320] env[61629]: listener.cb(fileno) [ 593.820320] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 593.820320] env[61629]: result = function(*args, **kwargs) [ 593.820320] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 593.820320] env[61629]: return func(*args, **kwargs) [ 593.820320] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 593.820320] env[61629]: raise e [ 593.820320] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 593.820320] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 593.820320] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 593.820320] env[61629]: created_port_ids = self._update_ports_for_instance( [ 593.820320] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 593.820320] env[61629]: with excutils.save_and_reraise_exception(): [ 593.820320] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 593.820320] env[61629]: self.force_reraise() [ 593.820320] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 593.820320] env[61629]: raise self.value [ 593.820320] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 593.820320] env[61629]: updated_port = self._update_port( [ 593.820320] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 593.820320] env[61629]: _ensure_no_port_binding_failure(port) [ 593.820320] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 593.820320] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 593.821209] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 2288edb5-249b-4290-b09b-0a4321f47a5a, please check neutron logs for more information. [ 593.821209] env[61629]: Removing descriptor: 21 [ 593.821209] env[61629]: ERROR nova.compute.manager [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2288edb5-249b-4290-b09b-0a4321f47a5a, please check neutron logs for more information. 
[ 593.821209] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Traceback (most recent call last): [ 593.821209] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 593.821209] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] yield resources [ 593.821209] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 593.821209] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] self.driver.spawn(context, instance, image_meta, [ 593.821209] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 593.821209] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 593.821209] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 593.821209] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] vm_ref = self.build_virtual_machine(instance, [ 593.821546] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 593.821546] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] vif_infos = vmwarevif.get_vif_info(self._session, [ 593.821546] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 593.821546] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] for vif in network_info: [ 593.821546] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 593.821546] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] return self._sync_wrapper(fn, *args, **kwargs) [ 593.821546] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 593.821546] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] self.wait() [ 593.821546] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 593.821546] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] self[:] = self._gt.wait() [ 593.821546] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 593.821546] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] return self._exit_event.wait() [ 593.821546] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 593.821977] env[61629]: ERROR 
nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] result = hub.switch() [ 593.821977] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 593.821977] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] return self.greenlet.switch() [ 593.821977] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 593.821977] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] result = function(*args, **kwargs) [ 593.821977] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 593.821977] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] return func(*args, **kwargs) [ 593.821977] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 593.821977] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] raise e [ 593.821977] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 593.821977] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] nwinfo = self.network_api.allocate_for_instance( [ 593.821977] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 593.821977] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] created_port_ids = self._update_ports_for_instance( [ 593.822383] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 593.822383] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] with excutils.save_and_reraise_exception(): [ 593.822383] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 593.822383] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] self.force_reraise() [ 593.822383] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 593.822383] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] raise self.value [ 593.822383] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 593.822383] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] updated_port = self._update_port( [ 593.822383] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 593.822383] 
env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] _ensure_no_port_binding_failure(port) [ 593.822383] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 593.822383] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] raise exception.PortBindingFailed(port_id=port['id']) [ 593.822830] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] nova.exception.PortBindingFailed: Binding failed for port 2288edb5-249b-4290-b09b-0a4321f47a5a, please check neutron logs for more information. [ 593.822830] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] [ 593.822830] env[61629]: INFO nova.compute.manager [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Terminating instance [ 593.823576] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Acquiring lock "refresh_cache-ce3a7a32-424a-48a4-b5c5-2a25190943f5" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 593.823576] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Acquired lock "refresh_cache-ce3a7a32-424a-48a4-b5c5-2a25190943f5" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.823576] env[61629]: DEBUG nova.network.neutron [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 593.877253] env[61629]: DEBUG oslo_concurrency.lockutils [req-533eaebe-b63d-48d5-8a5f-bb57fc81d5af req-ca0cb6ca-6db6-4805-a5bc-85f49c8c920b service nova] Releasing lock "refresh_cache-8d858fe9-1c97-457b-87ba-2d405bb7dcc0" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 593.877531] env[61629]: DEBUG nova.compute.manager [req-533eaebe-b63d-48d5-8a5f-bb57fc81d5af req-ca0cb6ca-6db6-4805-a5bc-85f49c8c920b service nova] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Received event network-vif-deleted-255c484c-cf0c-421e-a590-e8b70f204eee {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 594.147646] env[61629]: DEBUG nova.compute.manager [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 594.338770] env[61629]: DEBUG nova.network.neutron [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Successfully created port: a8217668-74ac-4d0c-811c-4995094be013 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 594.348187] env[61629]: DEBUG nova.network.neutron [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 594.487889] env[61629]: DEBUG nova.network.neutron [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.665050] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34a30c79-8c26-4e5b-9c1e-ce48239b6187 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.672344] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-436590dc-c683-492c-bdd5-13ec896cf864 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.717425] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d10b9f22-396c-48cd-85b4-0ad832ea88ac {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.729945] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c34424d-ff1e-4364-b774-53e814d787b6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.744518] env[61629]: DEBUG nova.compute.provider_tree [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 594.802853] env[61629]: DEBUG nova.compute.manager [req-65226291-b71e-4feb-ba1b-beec8bf0d3dd req-399eb158-6971-44da-a0e2-fad808b43261 service nova] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Received event network-changed-2288edb5-249b-4290-b09b-0a4321f47a5a {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 594.803082] env[61629]: DEBUG nova.compute.manager [req-65226291-b71e-4feb-ba1b-beec8bf0d3dd req-399eb158-6971-44da-a0e2-fad808b43261 service nova] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Refreshing instance network info cache due to event network-changed-2288edb5-249b-4290-b09b-0a4321f47a5a. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 594.803275] env[61629]: DEBUG oslo_concurrency.lockutils [req-65226291-b71e-4feb-ba1b-beec8bf0d3dd req-399eb158-6971-44da-a0e2-fad808b43261 service nova] Acquiring lock "refresh_cache-ce3a7a32-424a-48a4-b5c5-2a25190943f5" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.995745] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Releasing lock "refresh_cache-ce3a7a32-424a-48a4-b5c5-2a25190943f5" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.995745] env[61629]: DEBUG nova.compute.manager [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 594.995745] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 594.995745] env[61629]: DEBUG oslo_concurrency.lockutils [req-65226291-b71e-4feb-ba1b-beec8bf0d3dd req-399eb158-6971-44da-a0e2-fad808b43261 service nova] Acquired lock "refresh_cache-ce3a7a32-424a-48a4-b5c5-2a25190943f5" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.995745] env[61629]: DEBUG nova.network.neutron [req-65226291-b71e-4feb-ba1b-beec8bf0d3dd req-399eb158-6971-44da-a0e2-fad808b43261 service nova] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Refreshing network info cache for port 2288edb5-249b-4290-b09b-0a4321f47a5a {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 594.996824] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1bebf87e-8a3e-430e-a89b-d7bbd8bd839b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.010104] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-042a0599-5b73-4e30-ab8e-7c69d1732e1d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.038195] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ce3a7a32-424a-48a4-b5c5-2a25190943f5 could not be found. 
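The Acquiring / Acquired / Releasing lock "refresh_cache-<uuid>" entries above are emitted by oslo.concurrency's lock() context manager (the lockutils.py:310/313/331 call sites in the log); the "acquired by ... waited/held" variants elsewhere come from its synchronized-decorator form. A minimal sketch of the context-manager pattern, with a made-up lock name and helper rather than anything taken from Nova:

    from oslo_concurrency import lockutils

    def refresh_instance_cache():
        # Placeholder for the work done while the cache lock is held
        # (hypothetical helper, not a Nova function).
        pass

    # Entering the context logs "Acquiring lock ..." and "Acquired lock ...";
    # leaving it logs "Releasing lock ...", matching the entries above.
    with lockutils.lock("refresh_cache-example-instance-uuid"):
        refresh_instance_cache()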
[ 595.038437] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 595.038616] env[61629]: INFO nova.compute.manager [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 595.038856] env[61629]: DEBUG oslo.service.loopingcall [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 595.039087] env[61629]: DEBUG nova.compute.manager [-] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 595.039186] env[61629]: DEBUG nova.network.neutron [-] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 595.057680] env[61629]: DEBUG nova.network.neutron [-] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 595.158599] env[61629]: DEBUG nova.compute.manager [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 595.192522] env[61629]: DEBUG nova.virt.hardware [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:55:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='796290865',id=23,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-595145800',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 595.192773] env[61629]: DEBUG nova.virt.hardware [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 595.192923] env[61629]: DEBUG nova.virt.hardware [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 595.193115] env[61629]: DEBUG nova.virt.hardware [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 595.193261] env[61629]: DEBUG nova.virt.hardware [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 595.193407] env[61629]: DEBUG nova.virt.hardware [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 595.193613] env[61629]: DEBUG nova.virt.hardware [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 595.193769] env[61629]: DEBUG nova.virt.hardware [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 
tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 595.193968] env[61629]: DEBUG nova.virt.hardware [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 595.194126] env[61629]: DEBUG nova.virt.hardware [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 595.194274] env[61629]: DEBUG nova.virt.hardware [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 595.195243] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f7f3f15-c28e-4201-967e-1671fbecd02f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.205085] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce34c7ca-8f67-4345-a519-a82cb1b328ff {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.248580] env[61629]: DEBUG nova.scheduler.client.report [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 595.343206] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Acquiring lock "05b868fd-401e-48b7-928f-a39c002bbe71" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.343463] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Lock "05b868fd-401e-48b7-928f-a39c002bbe71" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
595.522069] env[61629]: DEBUG nova.network.neutron [req-65226291-b71e-4feb-ba1b-beec8bf0d3dd req-399eb158-6971-44da-a0e2-fad808b43261 service nova] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 595.561233] env[61629]: DEBUG nova.network.neutron [-] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.636954] env[61629]: DEBUG nova.network.neutron [req-65226291-b71e-4feb-ba1b-beec8bf0d3dd req-399eb158-6971-44da-a0e2-fad808b43261 service nova] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.757464] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.624s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.758079] env[61629]: DEBUG nova.compute.manager [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 595.761214] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.997s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.762599] env[61629]: INFO nova.compute.claims [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 595.904025] env[61629]: DEBUG nova.compute.manager [req-3a5fce9f-9360-40b3-87b8-addd451e1249 req-8e6eb64a-1528-4e7a-bec0-aa6fd78db8ea service nova] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Received event network-changed-a8217668-74ac-4d0c-811c-4995094be013 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 595.904202] env[61629]: DEBUG nova.compute.manager [req-3a5fce9f-9360-40b3-87b8-addd451e1249 req-8e6eb64a-1528-4e7a-bec0-aa6fd78db8ea service nova] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Refreshing instance network info cache due to event network-changed-a8217668-74ac-4d0c-811c-4995094be013. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 595.904598] env[61629]: DEBUG oslo_concurrency.lockutils [req-3a5fce9f-9360-40b3-87b8-addd451e1249 req-8e6eb64a-1528-4e7a-bec0-aa6fd78db8ea service nova] Acquiring lock "refresh_cache-cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.904598] env[61629]: DEBUG oslo_concurrency.lockutils [req-3a5fce9f-9360-40b3-87b8-addd451e1249 req-8e6eb64a-1528-4e7a-bec0-aa6fd78db8ea service nova] Acquired lock "refresh_cache-cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.904775] env[61629]: DEBUG nova.network.neutron [req-3a5fce9f-9360-40b3-87b8-addd451e1249 req-8e6eb64a-1528-4e7a-bec0-aa6fd78db8ea service nova] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Refreshing network info cache for port a8217668-74ac-4d0c-811c-4995094be013 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 596.063450] env[61629]: INFO nova.compute.manager [-] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Took 1.02 seconds to deallocate network for instance. [ 596.065928] env[61629]: DEBUG nova.compute.claims [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 596.066265] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.135834] env[61629]: ERROR nova.compute.manager [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a8217668-74ac-4d0c-811c-4995094be013, please check neutron logs for more information. 
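Every traceback in this log passes through oslo_utils.excutils.save_and_reraise_exception, which is why the same __exit__ / force_reraise / raise self.value frames keep repeating. A short sketch of that pattern, using hypothetical helper names rather than Nova's own:

    from oslo_utils import excutils

    def bind_port(port):
        # Hypothetical stand-in for the Neutron port update that fails above.
        raise RuntimeError("binding failed for %s" % port)

    def roll_back(ports):
        # Hypothetical cleanup; runs before the original exception is re-raised.
        pass

    def update_ports(ports):
        for port in ports:
            try:
                bind_port(port)
            except Exception:
                # save_and_reraise_exception records the in-flight exception,
                # lets the cleanup run, then re-raises it on exit. That exit
                # path is the "__exit__ -> force_reraise -> raise self.value"
                # trio seen in the tracebacks here.
                with excutils.save_and_reraise_exception():
                    roll_back(ports)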
[ 596.135834] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 596.135834] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 596.135834] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 596.135834] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 596.135834] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 596.135834] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 596.135834] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 596.135834] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 596.135834] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 596.135834] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 596.135834] env[61629]: ERROR nova.compute.manager raise self.value [ 596.135834] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 596.135834] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 596.135834] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 596.135834] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 596.136433] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 596.136433] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 596.136433] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a8217668-74ac-4d0c-811c-4995094be013, please check neutron logs for more information. 
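The traceback above bottoms out in _ensure_no_port_binding_failure, which raises once Neutron reports the port's binding as failed. A simplified sketch of that check ('binding_failed' is the value Neutron leaves in binding:vif_type when no mechanism driver could bind the port; this is an illustration, not the exact code in nova/network/neutron.py):

    from nova import exception

    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    def ensure_no_port_binding_failure(port):
        # A port whose binding:vif_type is 'binding_failed' was never wired up
        # on the target host, which is what produces the PortBindingFailed
        # errors in this log.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise exception.PortBindingFailed(port_id=port['id'])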
[ 596.136433] env[61629]: ERROR nova.compute.manager [ 596.136433] env[61629]: Traceback (most recent call last): [ 596.136433] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 596.136433] env[61629]: listener.cb(fileno) [ 596.136433] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 596.136433] env[61629]: result = function(*args, **kwargs) [ 596.136433] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 596.136433] env[61629]: return func(*args, **kwargs) [ 596.136433] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 596.136433] env[61629]: raise e [ 596.136433] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 596.136433] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 596.136433] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 596.136433] env[61629]: created_port_ids = self._update_ports_for_instance( [ 596.136433] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 596.136433] env[61629]: with excutils.save_and_reraise_exception(): [ 596.136433] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 596.136433] env[61629]: self.force_reraise() [ 596.136433] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 596.136433] env[61629]: raise self.value [ 596.136433] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 596.136433] env[61629]: updated_port = self._update_port( [ 596.136433] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 596.136433] env[61629]: _ensure_no_port_binding_failure(port) [ 596.136433] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 596.136433] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 596.137279] env[61629]: nova.exception.PortBindingFailed: Binding failed for port a8217668-74ac-4d0c-811c-4995094be013, please check neutron logs for more information. [ 596.137279] env[61629]: Removing descriptor: 15 [ 596.137279] env[61629]: ERROR nova.compute.manager [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a8217668-74ac-4d0c-811c-4995094be013, please check neutron logs for more information. 
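The traceback that follows shows the same failure surfacing later, inside driver.spawn: network allocation runs in a background greenthread, and the PortBindingFailed is only re-raised when the VIF list is first iterated (the model.py _sync_wrapper / wait frames). The deferred-error behaviour can be reproduced with eventlet alone; the function name below is made up:

    import eventlet

    def allocate_network():
        # Stand-in for the async allocation: fails some time after spawn.
        raise RuntimeError("Binding failed for port ...")

    # The greenthread is started early in the build...
    gt = eventlet.spawn(allocate_network)

    # ...and the exception only appears here, when the result is waited on,
    # mirroring how spawn() fails inside get_vif_info()/network_info iteration.
    try:
        gt.wait()
    except RuntimeError as exc:
        print("surfaced at wait():", exc)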
[ 596.137279] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Traceback (most recent call last): [ 596.137279] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 596.137279] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] yield resources [ 596.137279] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 596.137279] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] self.driver.spawn(context, instance, image_meta, [ 596.137279] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 596.137279] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] self._vmops.spawn(context, instance, image_meta, injected_files, [ 596.137279] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 596.137279] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] vm_ref = self.build_virtual_machine(instance, [ 596.137809] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 596.137809] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] vif_infos = vmwarevif.get_vif_info(self._session, [ 596.137809] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 596.137809] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] for vif in network_info: [ 596.137809] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 596.137809] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] return self._sync_wrapper(fn, *args, **kwargs) [ 596.137809] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 596.137809] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] self.wait() [ 596.137809] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 596.137809] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] self[:] = self._gt.wait() [ 596.137809] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 596.137809] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] return self._exit_event.wait() [ 596.137809] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 596.138222] env[61629]: ERROR 
nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] result = hub.switch() [ 596.138222] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 596.138222] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] return self.greenlet.switch() [ 596.138222] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 596.138222] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] result = function(*args, **kwargs) [ 596.138222] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 596.138222] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] return func(*args, **kwargs) [ 596.138222] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 596.138222] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] raise e [ 596.138222] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 596.138222] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] nwinfo = self.network_api.allocate_for_instance( [ 596.138222] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 596.138222] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] created_port_ids = self._update_ports_for_instance( [ 596.138617] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 596.138617] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] with excutils.save_and_reraise_exception(): [ 596.138617] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 596.138617] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] self.force_reraise() [ 596.138617] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 596.138617] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] raise self.value [ 596.138617] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 596.138617] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] updated_port = self._update_port( [ 596.138617] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 596.138617] 
env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] _ensure_no_port_binding_failure(port) [ 596.138617] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 596.138617] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] raise exception.PortBindingFailed(port_id=port['id']) [ 596.138974] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] nova.exception.PortBindingFailed: Binding failed for port a8217668-74ac-4d0c-811c-4995094be013, please check neutron logs for more information. [ 596.138974] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] [ 596.138974] env[61629]: INFO nova.compute.manager [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Terminating instance [ 596.139498] env[61629]: DEBUG oslo_concurrency.lockutils [req-65226291-b71e-4feb-ba1b-beec8bf0d3dd req-399eb158-6971-44da-a0e2-fad808b43261 service nova] Releasing lock "refresh_cache-ce3a7a32-424a-48a4-b5c5-2a25190943f5" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 596.139728] env[61629]: DEBUG nova.compute.manager [req-65226291-b71e-4feb-ba1b-beec8bf0d3dd req-399eb158-6971-44da-a0e2-fad808b43261 service nova] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Received event network-vif-deleted-2288edb5-249b-4290-b09b-0a4321f47a5a {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 596.140353] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Acquiring lock "refresh_cache-cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.271711] env[61629]: DEBUG nova.compute.utils [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 596.274331] env[61629]: DEBUG nova.compute.manager [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 596.274496] env[61629]: DEBUG nova.network.neutron [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 596.318741] env[61629]: DEBUG nova.policy [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '23e8690dc0b54968a0cc9db2088089db', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eb7c457c8eff4e59a4c19205bdb2d6d3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 596.443562] env[61629]: DEBUG nova.network.neutron [req-3a5fce9f-9360-40b3-87b8-addd451e1249 req-8e6eb64a-1528-4e7a-bec0-aa6fd78db8ea service nova] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 596.553048] env[61629]: DEBUG nova.network.neutron [req-3a5fce9f-9360-40b3-87b8-addd451e1249 req-8e6eb64a-1528-4e7a-bec0-aa6fd78db8ea service nova] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.698528] env[61629]: DEBUG nova.network.neutron [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Successfully created port: 41caef89-d69f-4c91-b9fd-6e89296aba9c {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 596.775353] env[61629]: DEBUG nova.compute.manager [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 597.055323] env[61629]: DEBUG oslo_concurrency.lockutils [req-3a5fce9f-9360-40b3-87b8-addd451e1249 req-8e6eb64a-1528-4e7a-bec0-aa6fd78db8ea service nova] Releasing lock "refresh_cache-cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 597.055988] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Acquired lock "refresh_cache-cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.056882] env[61629]: DEBUG nova.network.neutron [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 597.229195] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e850c55-65c5-4e79-ad78-0f8490b4bc96 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.238654] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586ae79d-8780-46b6-b04f-b58313635353 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.272026] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7019dcb-e54c-4d10-a81e-965d462fb75e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.277324] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Acquiring lock "3a804973-af62-4de1-a4ee-5943209c5884" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.277535] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Lock "3a804973-af62-4de1-a4ee-5943209c5884" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 597.282714] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6cf309-bde7-4b35-a484-5723b940da07 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.299458] env[61629]: DEBUG nova.compute.provider_tree [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 597.580830] env[61629]: DEBUG nova.network.neutron [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 597.662883] env[61629]: DEBUG oslo_concurrency.lockutils [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Acquiring lock "012e6d9c-0f02-4761-9639-9a8e8972ea2b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.663150] env[61629]: DEBUG oslo_concurrency.lockutils [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Lock "012e6d9c-0f02-4761-9639-9a8e8972ea2b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 597.670016] env[61629]: DEBUG nova.network.neutron [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.786493] env[61629]: ERROR nova.compute.manager [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 41caef89-d69f-4c91-b9fd-6e89296aba9c, please check neutron logs for more information. 
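The error advises checking Neutron for port 41caef89-d69f-4c91-b9fd-6e89296aba9c. Besides the neutron-server logs, the binding state can be read back over the API while the port still exists; a sketch with openstacksdk (the cloud entry name is an assumption about the local clouds.yaml, and admin credentials are needed to see the binding:* fields):

    import openstack

    conn = openstack.connect(cloud='devstack-admin')   # assumed cloud entry
    port = conn.network.get_port('41caef89-d69f-4c91-b9fd-6e89296aba9c')

    # A failed binding shows up as vif_type 'binding_failed'; vif_details and
    # the neutron-server/ML2 logs explain why no mechanism driver bound it.
    print(port.binding_vif_type, port.binding_host_id)
    print(port.binding_vif_details)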
[ 597.786493] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 597.786493] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 597.786493] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 597.786493] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 597.786493] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 597.786493] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 597.786493] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 597.786493] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 597.786493] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 597.786493] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 597.786493] env[61629]: ERROR nova.compute.manager raise self.value [ 597.786493] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 597.786493] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 597.786493] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 597.786493] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 597.787378] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 597.787378] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 597.787378] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 41caef89-d69f-4c91-b9fd-6e89296aba9c, please check neutron logs for more information. 
[ 597.787378] env[61629]: ERROR nova.compute.manager [ 597.787378] env[61629]: Traceback (most recent call last): [ 597.787378] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 597.787378] env[61629]: listener.cb(fileno) [ 597.787378] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 597.787378] env[61629]: result = function(*args, **kwargs) [ 597.787378] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 597.787378] env[61629]: return func(*args, **kwargs) [ 597.787378] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 597.787378] env[61629]: raise e [ 597.787378] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 597.787378] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 597.787378] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 597.787378] env[61629]: created_port_ids = self._update_ports_for_instance( [ 597.787378] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 597.787378] env[61629]: with excutils.save_and_reraise_exception(): [ 597.787378] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 597.787378] env[61629]: self.force_reraise() [ 597.787378] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 597.787378] env[61629]: raise self.value [ 597.787378] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 597.787378] env[61629]: updated_port = self._update_port( [ 597.787378] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 597.787378] env[61629]: _ensure_no_port_binding_failure(port) [ 597.787378] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 597.787378] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 597.788304] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 41caef89-d69f-4c91-b9fd-6e89296aba9c, please check neutron logs for more information. [ 597.788304] env[61629]: Removing descriptor: 15 [ 597.790993] env[61629]: DEBUG nova.compute.manager [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 597.802351] env[61629]: DEBUG nova.scheduler.client.report [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 597.814259] env[61629]: DEBUG nova.virt.hardware [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 597.814521] env[61629]: DEBUG nova.virt.hardware [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 597.814677] env[61629]: DEBUG nova.virt.hardware [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 597.814853] env[61629]: DEBUG nova.virt.hardware [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 597.815009] env[61629]: DEBUG nova.virt.hardware [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 597.815171] env[61629]: DEBUG nova.virt.hardware [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 597.815382] env[61629]: DEBUG nova.virt.hardware [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 597.815522] env[61629]: DEBUG nova.virt.hardware [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 597.815683] env[61629]: DEBUG nova.virt.hardware [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 597.816750] env[61629]: DEBUG nova.virt.hardware [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 597.816750] env[61629]: DEBUG nova.virt.hardware [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 597.816893] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff875f6e-c625-4de6-9147-964a6e8ee09d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.826643] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b0ab0f-c6ae-4914-aa87-cf7868b93a16 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.840170] env[61629]: ERROR nova.compute.manager [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 41caef89-d69f-4c91-b9fd-6e89296aba9c, please check neutron logs for more information. 
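The inventory reported for provider d075eff1-6f77-44a8-824e-16f3e03b4063 a few entries above maps to schedulable capacity as (total - reserved) * allocation_ratio per resource class, with max_unit separately capping a single allocation. A quick check of the logged numbers:

    # Inventory as logged for provider d075eff1-6f77-44a8-824e-16f3e03b4063.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0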
[ 597.840170] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Traceback (most recent call last): [ 597.840170] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 597.840170] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] yield resources [ 597.840170] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 597.840170] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] self.driver.spawn(context, instance, image_meta, [ 597.840170] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 597.840170] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] self._vmops.spawn(context, instance, image_meta, injected_files, [ 597.840170] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 597.840170] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] vm_ref = self.build_virtual_machine(instance, [ 597.840170] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 597.840605] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] vif_infos = vmwarevif.get_vif_info(self._session, [ 597.840605] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 597.840605] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] for vif in network_info: [ 597.840605] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 597.840605] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] return self._sync_wrapper(fn, *args, **kwargs) [ 597.840605] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 597.840605] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] self.wait() [ 597.840605] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 597.840605] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] self[:] = self._gt.wait() [ 597.840605] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 597.840605] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] return self._exit_event.wait() [ 597.840605] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 597.840605] env[61629]: ERROR 
nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] current.throw(*self._exc) [ 597.841049] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 597.841049] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] result = function(*args, **kwargs) [ 597.841049] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 597.841049] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] return func(*args, **kwargs) [ 597.841049] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 597.841049] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] raise e [ 597.841049] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 597.841049] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] nwinfo = self.network_api.allocate_for_instance( [ 597.841049] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 597.841049] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] created_port_ids = self._update_ports_for_instance( [ 597.841049] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 597.841049] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] with excutils.save_and_reraise_exception(): [ 597.841049] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 597.841482] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] self.force_reraise() [ 597.841482] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 597.841482] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] raise self.value [ 597.841482] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 597.841482] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] updated_port = self._update_port( [ 597.841482] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 597.841482] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] _ensure_no_port_binding_failure(port) [ 597.841482] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
597.841482] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] raise exception.PortBindingFailed(port_id=port['id']) [ 597.841482] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] nova.exception.PortBindingFailed: Binding failed for port 41caef89-d69f-4c91-b9fd-6e89296aba9c, please check neutron logs for more information. [ 597.841482] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] [ 597.841482] env[61629]: INFO nova.compute.manager [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Terminating instance [ 597.843402] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Acquiring lock "refresh_cache-e40e1443-6d5d-41e1-9822-08b782e39d27" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 597.843402] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Acquired lock "refresh_cache-e40e1443-6d5d-41e1-9822-08b782e39d27" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.843574] env[61629]: DEBUG nova.network.neutron [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 597.933234] env[61629]: DEBUG nova.compute.manager [req-54f9c998-1002-4d4f-91ba-bf6e8a41e9dc req-3e485b85-8021-469c-a3ed-a332fe820a88 service nova] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Received event network-vif-deleted-a8217668-74ac-4d0c-811c-4995094be013 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 597.933454] env[61629]: DEBUG nova.compute.manager [req-54f9c998-1002-4d4f-91ba-bf6e8a41e9dc req-3e485b85-8021-469c-a3ed-a332fe820a88 service nova] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Received event network-changed-41caef89-d69f-4c91-b9fd-6e89296aba9c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 597.933592] env[61629]: DEBUG nova.compute.manager [req-54f9c998-1002-4d4f-91ba-bf6e8a41e9dc req-3e485b85-8021-469c-a3ed-a332fe820a88 service nova] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Refreshing instance network info cache due to event network-changed-41caef89-d69f-4c91-b9fd-6e89296aba9c. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 597.933727] env[61629]: DEBUG oslo_concurrency.lockutils [req-54f9c998-1002-4d4f-91ba-bf6e8a41e9dc req-3e485b85-8021-469c-a3ed-a332fe820a88 service nova] Acquiring lock "refresh_cache-e40e1443-6d5d-41e1-9822-08b782e39d27" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.172234] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Releasing lock "refresh_cache-cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.172710] env[61629]: DEBUG nova.compute.manager [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 598.172906] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 598.173224] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-940aaddb-c325-4003-9081-8d4430cbc6ee {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.182432] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77eea27a-5221-4320-8154-f6f4fa4064f2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.205060] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59 could not be found. [ 598.205316] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 598.205498] env[61629]: INFO nova.compute.manager [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Took 0.03 seconds to destroy the instance on the hypervisor. 
[ 598.205746] env[61629]: DEBUG oslo.service.loopingcall [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 598.205960] env[61629]: DEBUG nova.compute.manager [-] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 598.206067] env[61629]: DEBUG nova.network.neutron [-] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 598.222101] env[61629]: DEBUG nova.network.neutron [-] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 598.310157] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.549s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.310824] env[61629]: DEBUG nova.compute.manager [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 598.314879] env[61629]: DEBUG oslo_concurrency.lockutils [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.898s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.315912] env[61629]: INFO nova.compute.claims [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 598.366183] env[61629]: DEBUG nova.network.neutron [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 598.462830] env[61629]: DEBUG nova.network.neutron [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.724587] env[61629]: DEBUG nova.network.neutron [-] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.820234] env[61629]: DEBUG nova.compute.utils [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 598.823472] env[61629]: DEBUG nova.compute.manager [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 598.823637] env[61629]: DEBUG nova.network.neutron [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 598.866517] env[61629]: DEBUG nova.policy [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c5bb906a9a674c69b91c40b5851d25b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a13dd69e57ab4154bcc238375cda40f8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 598.965302] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Releasing lock "refresh_cache-e40e1443-6d5d-41e1-9822-08b782e39d27" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.966033] env[61629]: DEBUG nova.compute.manager [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 598.966033] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 598.966578] env[61629]: DEBUG oslo_concurrency.lockutils [req-54f9c998-1002-4d4f-91ba-bf6e8a41e9dc req-3e485b85-8021-469c-a3ed-a332fe820a88 service nova] Acquired lock "refresh_cache-e40e1443-6d5d-41e1-9822-08b782e39d27" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.966759] env[61629]: DEBUG nova.network.neutron [req-54f9c998-1002-4d4f-91ba-bf6e8a41e9dc req-3e485b85-8021-469c-a3ed-a332fe820a88 service nova] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Refreshing network info cache for port 41caef89-d69f-4c91-b9fd-6e89296aba9c {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 598.967808] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-08816511-8248-4f5f-87d4-de0d894efa1c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.978021] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb28bafe-945d-4f46-b6a1-e17f71705e19 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.001428] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e40e1443-6d5d-41e1-9822-08b782e39d27 could not be found. [ 599.001650] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 599.001827] env[61629]: INFO nova.compute.manager [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Took 0.04 seconds to destroy the instance on the hypervisor. [ 599.002079] env[61629]: DEBUG oslo.service.loopingcall [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 599.002338] env[61629]: DEBUG nova.compute.manager [-] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 599.002440] env[61629]: DEBUG nova.network.neutron [-] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 599.018314] env[61629]: DEBUG nova.network.neutron [-] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 599.182738] env[61629]: DEBUG nova.network.neutron [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Successfully created port: c65338f3-f7e6-4be6-8cbb-d35aabc9f788 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 599.227299] env[61629]: INFO nova.compute.manager [-] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Took 1.02 seconds to deallocate network for instance. [ 599.232892] env[61629]: DEBUG nova.compute.claims [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 599.233090] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.325158] env[61629]: DEBUG nova.compute.manager [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 599.488349] env[61629]: DEBUG nova.network.neutron [req-54f9c998-1002-4d4f-91ba-bf6e8a41e9dc req-3e485b85-8021-469c-a3ed-a332fe820a88 service nova] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 599.521552] env[61629]: DEBUG nova.network.neutron [-] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.595441] env[61629]: DEBUG nova.network.neutron [req-54f9c998-1002-4d4f-91ba-bf6e8a41e9dc req-3e485b85-8021-469c-a3ed-a332fe820a88 service nova] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.798639] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80fa0e7-e151-467e-a096-c816b71d551a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.806079] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab41496-0388-4cf7-86a0-61c853f0618a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.838046] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b8a229-32a3-4de8-80c4-bf9af6e0f181 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.844903] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c9c65c-7301-4caf-ba2f-fde88a4e21fe {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.858600] env[61629]: DEBUG nova.compute.provider_tree [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 599.997961] env[61629]: DEBUG nova.compute.manager [req-366a2dfc-9ec7-406c-a966-6cdd008e35df req-6f3164d7-35c6-4ebf-a8e0-2e20237468bb service nova] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Received event network-changed-c65338f3-f7e6-4be6-8cbb-d35aabc9f788 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 599.998136] env[61629]: DEBUG nova.compute.manager [req-366a2dfc-9ec7-406c-a966-6cdd008e35df req-6f3164d7-35c6-4ebf-a8e0-2e20237468bb service nova] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Refreshing instance network info cache due to event network-changed-c65338f3-f7e6-4be6-8cbb-d35aabc9f788. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 599.998372] env[61629]: DEBUG oslo_concurrency.lockutils [req-366a2dfc-9ec7-406c-a966-6cdd008e35df req-6f3164d7-35c6-4ebf-a8e0-2e20237468bb service nova] Acquiring lock "refresh_cache-01c864cd-58a3-4061-836d-6a86ad37e4c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 599.998518] env[61629]: DEBUG oslo_concurrency.lockutils [req-366a2dfc-9ec7-406c-a966-6cdd008e35df req-6f3164d7-35c6-4ebf-a8e0-2e20237468bb service nova] Acquired lock "refresh_cache-01c864cd-58a3-4061-836d-6a86ad37e4c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.998700] env[61629]: DEBUG nova.network.neutron [req-366a2dfc-9ec7-406c-a966-6cdd008e35df req-6f3164d7-35c6-4ebf-a8e0-2e20237468bb service nova] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Refreshing network info cache for port c65338f3-f7e6-4be6-8cbb-d35aabc9f788 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 600.023837] env[61629]: INFO nova.compute.manager [-] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Took 1.02 seconds to deallocate network for instance. [ 600.025947] env[61629]: DEBUG nova.compute.claims [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 600.026144] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.097741] env[61629]: DEBUG oslo_concurrency.lockutils [req-54f9c998-1002-4d4f-91ba-bf6e8a41e9dc req-3e485b85-8021-469c-a3ed-a332fe820a88 service nova] Releasing lock "refresh_cache-e40e1443-6d5d-41e1-9822-08b782e39d27" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.098019] env[61629]: DEBUG nova.compute.manager [req-54f9c998-1002-4d4f-91ba-bf6e8a41e9dc req-3e485b85-8021-469c-a3ed-a332fe820a88 service nova] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Received event network-vif-deleted-41caef89-d69f-4c91-b9fd-6e89296aba9c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 600.241466] env[61629]: ERROR nova.compute.manager [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c65338f3-f7e6-4be6-8cbb-d35aabc9f788, please check neutron logs for more information. 
[ 600.241466] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 600.241466] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 600.241466] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 600.241466] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 600.241466] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 600.241466] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 600.241466] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 600.241466] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 600.241466] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 600.241466] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 600.241466] env[61629]: ERROR nova.compute.manager raise self.value [ 600.241466] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 600.241466] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 600.241466] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 600.241466] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 600.243264] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 600.243264] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 600.243264] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c65338f3-f7e6-4be6-8cbb-d35aabc9f788, please check neutron logs for more information. 
[ 600.243264] env[61629]: ERROR nova.compute.manager [ 600.243264] env[61629]: Traceback (most recent call last): [ 600.243264] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 600.243264] env[61629]: listener.cb(fileno) [ 600.243264] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 600.243264] env[61629]: result = function(*args, **kwargs) [ 600.243264] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 600.243264] env[61629]: return func(*args, **kwargs) [ 600.243264] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 600.243264] env[61629]: raise e [ 600.243264] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 600.243264] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 600.243264] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 600.243264] env[61629]: created_port_ids = self._update_ports_for_instance( [ 600.243264] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 600.243264] env[61629]: with excutils.save_and_reraise_exception(): [ 600.243264] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 600.243264] env[61629]: self.force_reraise() [ 600.243264] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 600.243264] env[61629]: raise self.value [ 600.243264] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 600.243264] env[61629]: updated_port = self._update_port( [ 600.243264] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 600.243264] env[61629]: _ensure_no_port_binding_failure(port) [ 600.243264] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 600.243264] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 600.244205] env[61629]: nova.exception.PortBindingFailed: Binding failed for port c65338f3-f7e6-4be6-8cbb-d35aabc9f788, please check neutron logs for more information. [ 600.244205] env[61629]: Removing descriptor: 15 [ 600.344409] env[61629]: DEBUG nova.compute.manager [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 600.365493] env[61629]: DEBUG nova.scheduler.client.report [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 600.376592] env[61629]: DEBUG nova.virt.hardware [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 600.376828] env[61629]: DEBUG nova.virt.hardware [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 600.376982] env[61629]: DEBUG nova.virt.hardware [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 600.377180] env[61629]: DEBUG nova.virt.hardware [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 600.377324] env[61629]: DEBUG nova.virt.hardware [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 600.377471] env[61629]: DEBUG nova.virt.hardware [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 600.377679] env[61629]: DEBUG nova.virt.hardware 
[None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 600.377836] env[61629]: DEBUG nova.virt.hardware [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 600.378009] env[61629]: DEBUG nova.virt.hardware [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 600.378192] env[61629]: DEBUG nova.virt.hardware [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 600.378405] env[61629]: DEBUG nova.virt.hardware [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 600.379369] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2179d202-2912-481c-b426-ba351d8b7f16 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.391601] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ad7a57-67de-4537-852d-3a6f6872c95e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.405133] env[61629]: ERROR nova.compute.manager [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c65338f3-f7e6-4be6-8cbb-d35aabc9f788, please check neutron logs for more information. 
[ 600.405133] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Traceback (most recent call last): [ 600.405133] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 600.405133] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] yield resources [ 600.405133] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 600.405133] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] self.driver.spawn(context, instance, image_meta, [ 600.405133] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 600.405133] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 600.405133] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 600.405133] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] vm_ref = self.build_virtual_machine(instance, [ 600.405133] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 600.405638] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] vif_infos = vmwarevif.get_vif_info(self._session, [ 600.405638] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 600.405638] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] for vif in network_info: [ 600.405638] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 600.405638] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] return self._sync_wrapper(fn, *args, **kwargs) [ 600.405638] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 600.405638] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] self.wait() [ 600.405638] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 600.405638] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] self[:] = self._gt.wait() [ 600.405638] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 600.405638] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] return self._exit_event.wait() [ 600.405638] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 600.405638] env[61629]: ERROR 
nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] current.throw(*self._exc) [ 600.406044] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 600.406044] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] result = function(*args, **kwargs) [ 600.406044] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 600.406044] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] return func(*args, **kwargs) [ 600.406044] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 600.406044] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] raise e [ 600.406044] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 600.406044] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] nwinfo = self.network_api.allocate_for_instance( [ 600.406044] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 600.406044] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] created_port_ids = self._update_ports_for_instance( [ 600.406044] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 600.406044] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] with excutils.save_and_reraise_exception(): [ 600.406044] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 600.406469] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] self.force_reraise() [ 600.406469] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 600.406469] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] raise self.value [ 600.406469] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 600.406469] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] updated_port = self._update_port( [ 600.406469] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 600.406469] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] _ensure_no_port_binding_failure(port) [ 600.406469] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
600.406469] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] raise exception.PortBindingFailed(port_id=port['id']) [ 600.406469] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] nova.exception.PortBindingFailed: Binding failed for port c65338f3-f7e6-4be6-8cbb-d35aabc9f788, please check neutron logs for more information. [ 600.406469] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] [ 600.406469] env[61629]: INFO nova.compute.manager [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Terminating instance [ 600.407489] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Acquiring lock "refresh_cache-01c864cd-58a3-4061-836d-6a86ad37e4c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.526069] env[61629]: DEBUG nova.network.neutron [req-366a2dfc-9ec7-406c-a966-6cdd008e35df req-6f3164d7-35c6-4ebf-a8e0-2e20237468bb service nova] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 600.617690] env[61629]: DEBUG nova.network.neutron [req-366a2dfc-9ec7-406c-a966-6cdd008e35df req-6f3164d7-35c6-4ebf-a8e0-2e20237468bb service nova] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.870622] env[61629]: DEBUG oslo_concurrency.lockutils [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.556s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.871111] env[61629]: DEBUG nova.compute.manager [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 600.874052] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.975s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.875521] env[61629]: INFO nova.compute.claims [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 601.121537] env[61629]: DEBUG oslo_concurrency.lockutils [req-366a2dfc-9ec7-406c-a966-6cdd008e35df req-6f3164d7-35c6-4ebf-a8e0-2e20237468bb service nova] Releasing lock "refresh_cache-01c864cd-58a3-4061-836d-6a86ad37e4c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 601.121537] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Acquired lock "refresh_cache-01c864cd-58a3-4061-836d-6a86ad37e4c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.121787] env[61629]: DEBUG nova.network.neutron [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 601.380452] env[61629]: DEBUG nova.compute.utils [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 601.384143] env[61629]: DEBUG nova.compute.manager [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 601.384371] env[61629]: DEBUG nova.network.neutron [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 601.432648] env[61629]: DEBUG nova.policy [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3705be0096fe4e8cb2eb327db260b2e8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '56ff4e9cfce2462d996817df72f7a201', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 601.640118] env[61629]: DEBUG nova.network.neutron [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 601.738039] env[61629]: DEBUG nova.network.neutron [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.802672] env[61629]: DEBUG nova.network.neutron [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Successfully created port: 1b65b374-ffea-4bbe-8378-6c0e4634c57c {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 601.885261] env[61629]: DEBUG nova.compute.manager [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 602.048493] env[61629]: DEBUG nova.compute.manager [req-84b4f1c7-ac02-4ea8-b092-8137e8f3a01e req-4b906781-2fce-4b0c-b9f0-4699e6d1b8fa service nova] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Received event network-vif-deleted-c65338f3-f7e6-4be6-8cbb-d35aabc9f788 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 602.242300] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Releasing lock "refresh_cache-01c864cd-58a3-4061-836d-6a86ad37e4c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.242300] env[61629]: DEBUG nova.compute.manager [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 602.242668] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 602.243276] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-be0abafc-403d-46d4-bd91-310b2e6cde28 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.254197] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f265cf-f624-4322-80d2-4a7f057c0069 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.283498] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 01c864cd-58a3-4061-836d-6a86ad37e4c4 could not be found. [ 602.283763] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 602.284572] env[61629]: INFO nova.compute.manager [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 602.284572] env[61629]: DEBUG oslo.service.loopingcall [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 602.286884] env[61629]: DEBUG nova.compute.manager [-] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 602.286985] env[61629]: DEBUG nova.network.neutron [-] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 602.316317] env[61629]: DEBUG nova.network.neutron [-] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 602.403944] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8cf8cda-9505-4491-a453-ed071e53d93a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.411915] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b342d5b-fd48-4d2a-87f0-fadd73f956cd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.448245] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759bd172-2450-4033-9554-c25ff5cce608 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.456271] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b37ed56-1ac8-484c-ba94-ecb89a490c36 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.472924] env[61629]: DEBUG nova.compute.provider_tree [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 602.822359] env[61629]: DEBUG nova.network.neutron [-] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.900079] env[61629]: DEBUG nova.compute.manager [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 602.926477] env[61629]: DEBUG nova.virt.hardware [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 602.926750] env[61629]: DEBUG nova.virt.hardware [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 602.926933] env[61629]: DEBUG nova.virt.hardware [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 602.928042] env[61629]: DEBUG nova.virt.hardware [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 602.928042] env[61629]: DEBUG nova.virt.hardware [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 602.928042] env[61629]: DEBUG nova.virt.hardware [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 602.928042] env[61629]: DEBUG nova.virt.hardware [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 602.928042] env[61629]: DEBUG nova.virt.hardware [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 602.928431] env[61629]: DEBUG nova.virt.hardware [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 602.928431] env[61629]: DEBUG nova.virt.hardware [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 602.928431] env[61629]: DEBUG nova.virt.hardware [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 602.929048] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d6f8423-c000-46d9-aaca-e881a3bf3f7a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.936583] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3834379b-7fd6-4f06-9359-067f4935c867 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.976286] env[61629]: DEBUG nova.scheduler.client.report [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 603.144975] env[61629]: ERROR nova.compute.manager [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1b65b374-ffea-4bbe-8378-6c0e4634c57c, please check neutron logs for more information. 
[ 603.144975] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 603.144975] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 603.144975] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 603.144975] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 603.144975] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 603.144975] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 603.144975] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 603.144975] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 603.144975] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 603.144975] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 603.144975] env[61629]: ERROR nova.compute.manager raise self.value [ 603.144975] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 603.144975] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 603.144975] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 603.144975] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 603.145522] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 603.145522] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 603.145522] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1b65b374-ffea-4bbe-8378-6c0e4634c57c, please check neutron logs for more information. 
[ 603.145522] env[61629]: ERROR nova.compute.manager [ 603.146798] env[61629]: Traceback (most recent call last): [ 603.146798] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 603.146798] env[61629]: listener.cb(fileno) [ 603.146798] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 603.146798] env[61629]: result = function(*args, **kwargs) [ 603.146798] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 603.146798] env[61629]: return func(*args, **kwargs) [ 603.146798] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 603.146798] env[61629]: raise e [ 603.146798] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 603.146798] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 603.146798] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 603.146798] env[61629]: created_port_ids = self._update_ports_for_instance( [ 603.146798] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 603.146798] env[61629]: with excutils.save_and_reraise_exception(): [ 603.146798] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 603.146798] env[61629]: self.force_reraise() [ 603.146798] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 603.146798] env[61629]: raise self.value [ 603.146798] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 603.146798] env[61629]: updated_port = self._update_port( [ 603.146798] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 603.146798] env[61629]: _ensure_no_port_binding_failure(port) [ 603.146798] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 603.146798] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 603.146798] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 1b65b374-ffea-4bbe-8378-6c0e4634c57c, please check neutron logs for more information. [ 603.146798] env[61629]: Removing descriptor: 15 [ 603.147742] env[61629]: ERROR nova.compute.manager [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1b65b374-ffea-4bbe-8378-6c0e4634c57c, please check neutron logs for more information. 
[ 603.147742] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Traceback (most recent call last): [ 603.147742] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 603.147742] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] yield resources [ 603.147742] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 603.147742] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] self.driver.spawn(context, instance, image_meta, [ 603.147742] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 603.147742] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 603.147742] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 603.147742] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] vm_ref = self.build_virtual_machine(instance, [ 603.147742] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 603.148166] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] vif_infos = vmwarevif.get_vif_info(self._session, [ 603.148166] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 603.148166] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] for vif in network_info: [ 603.148166] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 603.148166] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] return self._sync_wrapper(fn, *args, **kwargs) [ 603.148166] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 603.148166] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] self.wait() [ 603.148166] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 603.148166] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] self[:] = self._gt.wait() [ 603.148166] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 603.148166] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] return self._exit_event.wait() [ 603.148166] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 603.148166] env[61629]: ERROR 
nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] result = hub.switch() [ 603.148587] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 603.148587] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] return self.greenlet.switch() [ 603.148587] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 603.148587] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] result = function(*args, **kwargs) [ 603.148587] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 603.148587] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] return func(*args, **kwargs) [ 603.148587] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 603.148587] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] raise e [ 603.148587] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 603.148587] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] nwinfo = self.network_api.allocate_for_instance( [ 603.148587] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 603.148587] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] created_port_ids = self._update_ports_for_instance( [ 603.148587] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 603.148983] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] with excutils.save_and_reraise_exception(): [ 603.148983] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 603.148983] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] self.force_reraise() [ 603.148983] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 603.148983] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] raise self.value [ 603.148983] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 603.148983] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] updated_port = self._update_port( [ 603.148983] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 603.148983] 
env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] _ensure_no_port_binding_failure(port) [ 603.148983] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 603.148983] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] raise exception.PortBindingFailed(port_id=port['id']) [ 603.148983] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] nova.exception.PortBindingFailed: Binding failed for port 1b65b374-ffea-4bbe-8378-6c0e4634c57c, please check neutron logs for more information. [ 603.148983] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] [ 603.149528] env[61629]: INFO nova.compute.manager [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Terminating instance [ 603.150490] env[61629]: DEBUG oslo_concurrency.lockutils [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Acquiring lock "refresh_cache-9b950dc9-d79c-4b30-8b71-1910b46ffd9b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.151124] env[61629]: DEBUG oslo_concurrency.lockutils [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Acquired lock "refresh_cache-9b950dc9-d79c-4b30-8b71-1910b46ffd9b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.151124] env[61629]: DEBUG nova.network.neutron [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 603.332913] env[61629]: INFO nova.compute.manager [-] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Took 1.05 seconds to deallocate network for instance. 
[ 603.335329] env[61629]: DEBUG nova.compute.claims [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 603.335510] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.481103] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.607s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.481658] env[61629]: DEBUG nova.compute.manager [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 603.484415] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.734s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.486357] env[61629]: INFO nova.compute.claims [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 603.668259] env[61629]: DEBUG nova.network.neutron [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 603.761764] env[61629]: DEBUG nova.network.neutron [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.990682] env[61629]: DEBUG nova.compute.utils [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 603.994215] env[61629]: DEBUG nova.compute.manager [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 603.994381] env[61629]: DEBUG nova.network.neutron [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 604.044615] env[61629]: DEBUG nova.policy [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cfc36f90c3b04524851115b71d527e3e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bce060ec402d4c3a82dda623b417196a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 604.089712] env[61629]: DEBUG nova.compute.manager [req-788b6c33-30bd-4d64-aa57-455069bd6646 req-8ae4355a-e984-4038-a617-f314520055de service nova] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Received event network-changed-1b65b374-ffea-4bbe-8378-6c0e4634c57c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 604.089912] env[61629]: DEBUG nova.compute.manager [req-788b6c33-30bd-4d64-aa57-455069bd6646 req-8ae4355a-e984-4038-a617-f314520055de service nova] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Refreshing instance network info cache due to event network-changed-1b65b374-ffea-4bbe-8378-6c0e4634c57c. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 604.090116] env[61629]: DEBUG oslo_concurrency.lockutils [req-788b6c33-30bd-4d64-aa57-455069bd6646 req-8ae4355a-e984-4038-a617-f314520055de service nova] Acquiring lock "refresh_cache-9b950dc9-d79c-4b30-8b71-1910b46ffd9b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 604.265029] env[61629]: DEBUG oslo_concurrency.lockutils [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Releasing lock "refresh_cache-9b950dc9-d79c-4b30-8b71-1910b46ffd9b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.265241] env[61629]: DEBUG nova.compute.manager [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 604.265436] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 604.265740] env[61629]: DEBUG oslo_concurrency.lockutils [req-788b6c33-30bd-4d64-aa57-455069bd6646 req-8ae4355a-e984-4038-a617-f314520055de service nova] Acquired lock "refresh_cache-9b950dc9-d79c-4b30-8b71-1910b46ffd9b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.265906] env[61629]: DEBUG nova.network.neutron [req-788b6c33-30bd-4d64-aa57-455069bd6646 req-8ae4355a-e984-4038-a617-f314520055de service nova] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Refreshing network info cache for port 1b65b374-ffea-4bbe-8378-6c0e4634c57c {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 604.267302] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-034e91fa-02ea-4730-8e6b-a148dc9bedfb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.278069] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8f6cf9-2479-4b6c-bcdc-187d0d237961 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.299491] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9b950dc9-d79c-4b30-8b71-1910b46ffd9b could not be found. 
[ 604.299694] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 604.299887] env[61629]: INFO nova.compute.manager [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Took 0.03 seconds to destroy the instance on the hypervisor. [ 604.300118] env[61629]: DEBUG oslo.service.loopingcall [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 604.300332] env[61629]: DEBUG nova.compute.manager [-] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 604.300426] env[61629]: DEBUG nova.network.neutron [-] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 604.316050] env[61629]: DEBUG nova.network.neutron [-] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 604.413667] env[61629]: DEBUG nova.network.neutron [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Successfully created port: 7f4923f5-c864-45fa-8b34-99fc014ec84d {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 604.494953] env[61629]: DEBUG nova.compute.manager [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 604.797938] env[61629]: DEBUG nova.network.neutron [req-788b6c33-30bd-4d64-aa57-455069bd6646 req-8ae4355a-e984-4038-a617-f314520055de service nova] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 604.819608] env[61629]: DEBUG nova.network.neutron [-] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.912550] env[61629]: DEBUG nova.network.neutron [req-788b6c33-30bd-4d64-aa57-455069bd6646 req-8ae4355a-e984-4038-a617-f314520055de service nova] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.969979] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9eb8e4-9a81-4120-93c2-87963c1cad0b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.976898] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c2c25b-844f-43ae-b030-c4facea5ac85 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.011775] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06bc0215-b3c4-403b-9658-24d673ad39cd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.018617] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53709656-5447-48f4-bb30-7d49d1f91576 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.034237] env[61629]: DEBUG nova.compute.provider_tree [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 605.325660] env[61629]: INFO nova.compute.manager [-] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Took 1.03 seconds to deallocate network for instance. 
[ 605.329761] env[61629]: DEBUG nova.compute.claims [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 605.329761] env[61629]: DEBUG oslo_concurrency.lockutils [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.416126] env[61629]: DEBUG oslo_concurrency.lockutils [req-788b6c33-30bd-4d64-aa57-455069bd6646 req-8ae4355a-e984-4038-a617-f314520055de service nova] Releasing lock "refresh_cache-9b950dc9-d79c-4b30-8b71-1910b46ffd9b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.416718] env[61629]: DEBUG nova.compute.manager [req-788b6c33-30bd-4d64-aa57-455069bd6646 req-8ae4355a-e984-4038-a617-f314520055de service nova] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Received event network-vif-deleted-1b65b374-ffea-4bbe-8378-6c0e4634c57c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 605.458336] env[61629]: ERROR nova.compute.manager [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7f4923f5-c864-45fa-8b34-99fc014ec84d, please check neutron logs for more information. 
[ 605.458336] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 605.458336] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 605.458336] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 605.458336] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 605.458336] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 605.458336] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 605.458336] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 605.458336] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 605.458336] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 605.458336] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 605.458336] env[61629]: ERROR nova.compute.manager raise self.value [ 605.458336] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 605.458336] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 605.458336] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 605.458336] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 605.458920] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 605.458920] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 605.458920] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7f4923f5-c864-45fa-8b34-99fc014ec84d, please check neutron logs for more information. 
[ 605.458920] env[61629]: ERROR nova.compute.manager [ 605.458920] env[61629]: Traceback (most recent call last): [ 605.458920] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 605.458920] env[61629]: listener.cb(fileno) [ 605.458920] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 605.458920] env[61629]: result = function(*args, **kwargs) [ 605.458920] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 605.458920] env[61629]: return func(*args, **kwargs) [ 605.458920] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 605.458920] env[61629]: raise e [ 605.458920] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 605.458920] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 605.458920] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 605.458920] env[61629]: created_port_ids = self._update_ports_for_instance( [ 605.458920] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 605.458920] env[61629]: with excutils.save_and_reraise_exception(): [ 605.458920] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 605.458920] env[61629]: self.force_reraise() [ 605.458920] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 605.458920] env[61629]: raise self.value [ 605.458920] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 605.458920] env[61629]: updated_port = self._update_port( [ 605.458920] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 605.458920] env[61629]: _ensure_no_port_binding_failure(port) [ 605.458920] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 605.458920] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 605.460015] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 7f4923f5-c864-45fa-8b34-99fc014ec84d, please check neutron logs for more information. [ 605.460015] env[61629]: Removing descriptor: 15 [ 605.511685] env[61629]: DEBUG nova.compute.manager [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 605.537404] env[61629]: DEBUG nova.scheduler.client.report [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 605.542597] env[61629]: DEBUG nova.virt.hardware [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 605.542828] env[61629]: DEBUG nova.virt.hardware [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 605.542979] env[61629]: DEBUG nova.virt.hardware [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 605.543169] env[61629]: DEBUG nova.virt.hardware [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 605.543312] env[61629]: DEBUG nova.virt.hardware [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 605.543457] env[61629]: DEBUG nova.virt.hardware [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 605.543678] 
env[61629]: DEBUG nova.virt.hardware [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 605.543842] env[61629]: DEBUG nova.virt.hardware [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 605.544016] env[61629]: DEBUG nova.virt.hardware [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 605.544177] env[61629]: DEBUG nova.virt.hardware [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 605.544344] env[61629]: DEBUG nova.virt.hardware [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 605.545194] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa5a740-0da5-43ea-ac27-7ff77435b031 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.553841] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a0a7547-1e85-45d6-8337-14277e5df2f9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.569139] env[61629]: ERROR nova.compute.manager [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7f4923f5-c864-45fa-8b34-99fc014ec84d, please check neutron logs for more information. 
[ 605.569139] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Traceback (most recent call last): [ 605.569139] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 605.569139] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] yield resources [ 605.569139] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 605.569139] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] self.driver.spawn(context, instance, image_meta, [ 605.569139] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 605.569139] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] self._vmops.spawn(context, instance, image_meta, injected_files, [ 605.569139] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 605.569139] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] vm_ref = self.build_virtual_machine(instance, [ 605.569139] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 605.569566] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] vif_infos = vmwarevif.get_vif_info(self._session, [ 605.569566] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 605.569566] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] for vif in network_info: [ 605.569566] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 605.569566] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] return self._sync_wrapper(fn, *args, **kwargs) [ 605.569566] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 605.569566] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] self.wait() [ 605.569566] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 605.569566] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] self[:] = self._gt.wait() [ 605.569566] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 605.569566] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] return self._exit_event.wait() [ 605.569566] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 605.569566] env[61629]: ERROR 
nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] current.throw(*self._exc) [ 605.569996] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 605.569996] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] result = function(*args, **kwargs) [ 605.569996] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 605.569996] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] return func(*args, **kwargs) [ 605.569996] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 605.569996] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] raise e [ 605.569996] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 605.569996] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] nwinfo = self.network_api.allocate_for_instance( [ 605.569996] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 605.569996] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] created_port_ids = self._update_ports_for_instance( [ 605.569996] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 605.569996] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] with excutils.save_and_reraise_exception(): [ 605.569996] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 605.570472] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] self.force_reraise() [ 605.570472] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 605.570472] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] raise self.value [ 605.570472] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 605.570472] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] updated_port = self._update_port( [ 605.570472] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 605.570472] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] _ensure_no_port_binding_failure(port) [ 605.570472] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
605.570472] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] raise exception.PortBindingFailed(port_id=port['id']) [ 605.570472] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] nova.exception.PortBindingFailed: Binding failed for port 7f4923f5-c864-45fa-8b34-99fc014ec84d, please check neutron logs for more information. [ 605.570472] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] [ 605.570472] env[61629]: INFO nova.compute.manager [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Terminating instance [ 605.571295] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Acquiring lock "refresh_cache-b2ec37a4-09f6-428c-bca9-1ec121c9c390" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.571463] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Acquired lock "refresh_cache-b2ec37a4-09f6-428c-bca9-1ec121c9c390" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.571627] env[61629]: DEBUG nova.network.neutron [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 606.049499] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.565s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.050041] env[61629]: DEBUG nova.compute.manager [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 606.052607] env[61629]: DEBUG oslo_concurrency.lockutils [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 31.653s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.053071] env[61629]: DEBUG nova.objects.instance [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61629) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 606.090374] env[61629]: DEBUG nova.network.neutron [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.112125] env[61629]: DEBUG nova.compute.manager [req-4d3605ea-a108-4139-af82-f469d5a78f05 req-22c4b444-78ff-4886-bc39-c9d832063b06 service nova] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Received event network-changed-7f4923f5-c864-45fa-8b34-99fc014ec84d {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 606.112355] env[61629]: DEBUG nova.compute.manager [req-4d3605ea-a108-4139-af82-f469d5a78f05 req-22c4b444-78ff-4886-bc39-c9d832063b06 service nova] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Refreshing instance network info cache due to event network-changed-7f4923f5-c864-45fa-8b34-99fc014ec84d. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 606.112529] env[61629]: DEBUG oslo_concurrency.lockutils [req-4d3605ea-a108-4139-af82-f469d5a78f05 req-22c4b444-78ff-4886-bc39-c9d832063b06 service nova] Acquiring lock "refresh_cache-b2ec37a4-09f6-428c-bca9-1ec121c9c390" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.147715] env[61629]: DEBUG nova.network.neutron [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.560426] env[61629]: DEBUG nova.compute.utils [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 606.562866] env[61629]: DEBUG nova.compute.manager [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 606.565320] env[61629]: DEBUG nova.network.neutron [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 606.622944] env[61629]: DEBUG nova.policy [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '68d9c63a05c3428dacef4028a6d4bf8f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '867bc3e757c041199307af3d1fccf7cf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 606.652065] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Releasing lock "refresh_cache-b2ec37a4-09f6-428c-bca9-1ec121c9c390" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.652065] env[61629]: DEBUG nova.compute.manager [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 606.652065] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 606.652065] env[61629]: DEBUG oslo_concurrency.lockutils [req-4d3605ea-a108-4139-af82-f469d5a78f05 req-22c4b444-78ff-4886-bc39-c9d832063b06 service nova] Acquired lock "refresh_cache-b2ec37a4-09f6-428c-bca9-1ec121c9c390" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.652065] env[61629]: DEBUG nova.network.neutron [req-4d3605ea-a108-4139-af82-f469d5a78f05 req-22c4b444-78ff-4886-bc39-c9d832063b06 service nova] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Refreshing network info cache for port 7f4923f5-c864-45fa-8b34-99fc014ec84d {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 606.653608] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8dbc41fb-28c4-4d07-a0f7-2befe7ce1bd8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.663739] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352bd440-57e1-4608-a9fa-3c6736eb757a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.687365] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b2ec37a4-09f6-428c-bca9-1ec121c9c390 could not be found. [ 606.687706] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 606.687818] env[61629]: INFO nova.compute.manager [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Took 0.04 seconds to destroy the instance on the hypervisor. [ 606.688039] env[61629]: DEBUG oslo.service.loopingcall [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 606.688328] env[61629]: DEBUG nova.compute.manager [-] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 606.688961] env[61629]: DEBUG nova.network.neutron [-] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 606.704419] env[61629]: DEBUG nova.network.neutron [-] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.954808] env[61629]: DEBUG nova.network.neutron [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Successfully created port: 49005889-bfb0-4cb2-aecc-2e83d8b90c66 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 607.067029] env[61629]: DEBUG nova.compute.manager [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 607.067670] env[61629]: DEBUG oslo_concurrency.lockutils [None req-251c6d8c-a51f-4f8b-9f31-a519e9f240d1 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.068820] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.789s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.070296] env[61629]: INFO nova.compute.claims [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 607.174681] env[61629]: DEBUG nova.network.neutron [req-4d3605ea-a108-4139-af82-f469d5a78f05 req-22c4b444-78ff-4886-bc39-c9d832063b06 service nova] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.207486] env[61629]: DEBUG nova.network.neutron [-] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.227259] env[61629]: DEBUG nova.network.neutron [req-4d3605ea-a108-4139-af82-f469d5a78f05 req-22c4b444-78ff-4886-bc39-c9d832063b06 service nova] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.710059] env[61629]: INFO nova.compute.manager [-] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Took 1.02 seconds to deallocate network for instance. [ 607.715414] env[61629]: DEBUG nova.compute.claims [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 607.715601] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.729289] env[61629]: DEBUG oslo_concurrency.lockutils [req-4d3605ea-a108-4139-af82-f469d5a78f05 req-22c4b444-78ff-4886-bc39-c9d832063b06 service nova] Releasing lock "refresh_cache-b2ec37a4-09f6-428c-bca9-1ec121c9c390" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 607.729545] env[61629]: DEBUG nova.compute.manager [req-4d3605ea-a108-4139-af82-f469d5a78f05 req-22c4b444-78ff-4886-bc39-c9d832063b06 service nova] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Received event network-vif-deleted-7f4923f5-c864-45fa-8b34-99fc014ec84d {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 607.923457] env[61629]: ERROR nova.compute.manager [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 49005889-bfb0-4cb2-aecc-2e83d8b90c66, please check neutron logs for more information. 
[ 607.923457] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 607.923457] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 607.923457] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 607.923457] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 607.923457] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 607.923457] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 607.923457] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 607.923457] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 607.923457] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 607.923457] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 607.923457] env[61629]: ERROR nova.compute.manager raise self.value [ 607.923457] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 607.923457] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 607.923457] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 607.923457] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 607.923840] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 607.923840] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 607.923840] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 49005889-bfb0-4cb2-aecc-2e83d8b90c66, please check neutron logs for more information. 
[ 607.923840] env[61629]: ERROR nova.compute.manager [ 607.923840] env[61629]: Traceback (most recent call last): [ 607.923840] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 607.923840] env[61629]: listener.cb(fileno) [ 607.923840] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 607.923840] env[61629]: result = function(*args, **kwargs) [ 607.923840] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 607.923840] env[61629]: return func(*args, **kwargs) [ 607.923840] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 607.923840] env[61629]: raise e [ 607.923840] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 607.923840] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 607.923840] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 607.923840] env[61629]: created_port_ids = self._update_ports_for_instance( [ 607.923840] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 607.923840] env[61629]: with excutils.save_and_reraise_exception(): [ 607.923840] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 607.923840] env[61629]: self.force_reraise() [ 607.923840] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 607.923840] env[61629]: raise self.value [ 607.923840] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 607.923840] env[61629]: updated_port = self._update_port( [ 607.923840] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 607.923840] env[61629]: _ensure_no_port_binding_failure(port) [ 607.923840] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 607.923840] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 607.924517] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 49005889-bfb0-4cb2-aecc-2e83d8b90c66, please check neutron logs for more information. [ 607.924517] env[61629]: Removing descriptor: 15 [ 608.077141] env[61629]: DEBUG nova.compute.manager [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 608.101444] env[61629]: DEBUG nova.virt.hardware [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 608.102244] env[61629]: DEBUG nova.virt.hardware [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 608.102244] env[61629]: DEBUG nova.virt.hardware [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 608.102244] env[61629]: DEBUG nova.virt.hardware [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 608.102354] env[61629]: DEBUG nova.virt.hardware [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 608.102521] env[61629]: DEBUG nova.virt.hardware [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 608.102730] env[61629]: DEBUG nova.virt.hardware [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 608.102888] env[61629]: DEBUG nova.virt.hardware [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 608.103105] env[61629]: DEBUG 
nova.virt.hardware [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 608.103218] env[61629]: DEBUG nova.virt.hardware [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 608.103445] env[61629]: DEBUG nova.virt.hardware [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 608.104271] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1042ed43-638a-4399-b7dd-544b9199190b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.115175] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ece53b-42bb-4765-8f0c-c4b60b790c84 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.129651] env[61629]: ERROR nova.compute.manager [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 49005889-bfb0-4cb2-aecc-2e83d8b90c66, please check neutron logs for more information. 
[ 608.129651] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Traceback (most recent call last): [ 608.129651] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 608.129651] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] yield resources [ 608.129651] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 608.129651] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] self.driver.spawn(context, instance, image_meta, [ 608.129651] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 608.129651] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] self._vmops.spawn(context, instance, image_meta, injected_files, [ 608.129651] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 608.129651] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] vm_ref = self.build_virtual_machine(instance, [ 608.129651] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 608.129942] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] vif_infos = vmwarevif.get_vif_info(self._session, [ 608.129942] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 608.129942] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] for vif in network_info: [ 608.129942] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 608.129942] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] return self._sync_wrapper(fn, *args, **kwargs) [ 608.129942] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 608.129942] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] self.wait() [ 608.129942] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 608.129942] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] self[:] = self._gt.wait() [ 608.129942] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 608.129942] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] return self._exit_event.wait() [ 608.129942] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 608.129942] env[61629]: ERROR 
nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] current.throw(*self._exc) [ 608.130249] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 608.130249] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] result = function(*args, **kwargs) [ 608.130249] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 608.130249] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] return func(*args, **kwargs) [ 608.130249] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 608.130249] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] raise e [ 608.130249] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 608.130249] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] nwinfo = self.network_api.allocate_for_instance( [ 608.130249] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 608.130249] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] created_port_ids = self._update_ports_for_instance( [ 608.130249] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 608.130249] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] with excutils.save_and_reraise_exception(): [ 608.130249] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 608.130579] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] self.force_reraise() [ 608.130579] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 608.130579] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] raise self.value [ 608.130579] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 608.130579] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] updated_port = self._update_port( [ 608.130579] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 608.130579] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] _ensure_no_port_binding_failure(port) [ 608.130579] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
608.130579] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] raise exception.PortBindingFailed(port_id=port['id']) [ 608.130579] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] nova.exception.PortBindingFailed: Binding failed for port 49005889-bfb0-4cb2-aecc-2e83d8b90c66, please check neutron logs for more information. [ 608.130579] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] [ 608.130579] env[61629]: INFO nova.compute.manager [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Terminating instance [ 608.133790] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Acquiring lock "refresh_cache-b0343f07-0539-4395-81c8-46ca1f2a8920" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 608.133944] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Acquired lock "refresh_cache-b0343f07-0539-4395-81c8-46ca1f2a8920" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.134123] env[61629]: DEBUG nova.network.neutron [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 608.136700] env[61629]: DEBUG nova.compute.manager [req-c07b8446-124c-4f71-8adc-ffb22ba41ba6 req-95feb6fc-d19e-4ea2-975d-b6b3018519a0 service nova] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Received event network-changed-49005889-bfb0-4cb2-aecc-2e83d8b90c66 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 608.136879] env[61629]: DEBUG nova.compute.manager [req-c07b8446-124c-4f71-8adc-ffb22ba41ba6 req-95feb6fc-d19e-4ea2-975d-b6b3018519a0 service nova] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Refreshing instance network info cache due to event network-changed-49005889-bfb0-4cb2-aecc-2e83d8b90c66. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 608.137068] env[61629]: DEBUG oslo_concurrency.lockutils [req-c07b8446-124c-4f71-8adc-ffb22ba41ba6 req-95feb6fc-d19e-4ea2-975d-b6b3018519a0 service nova] Acquiring lock "refresh_cache-b0343f07-0539-4395-81c8-46ca1f2a8920" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 608.449022] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4562dc62-175f-474d-80bc-46abdb69cb09 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.456464] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-610ab50c-4887-4148-96e7-8d2dc815186c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.486829] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-130a9133-27dc-4da7-a141-62221614c024 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.494054] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e612333-4d33-4e24-b755-ac39627737f1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.507566] env[61629]: DEBUG nova.compute.provider_tree [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 608.652702] env[61629]: DEBUG nova.network.neutron [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 608.727780] env[61629]: DEBUG nova.network.neutron [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.010585] env[61629]: DEBUG nova.scheduler.client.report [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 609.230982] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Releasing lock "refresh_cache-b0343f07-0539-4395-81c8-46ca1f2a8920" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.231587] env[61629]: DEBUG nova.compute.manager [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 609.231881] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 609.232317] env[61629]: DEBUG oslo_concurrency.lockutils [req-c07b8446-124c-4f71-8adc-ffb22ba41ba6 req-95feb6fc-d19e-4ea2-975d-b6b3018519a0 service nova] Acquired lock "refresh_cache-b0343f07-0539-4395-81c8-46ca1f2a8920" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.232517] env[61629]: DEBUG nova.network.neutron [req-c07b8446-124c-4f71-8adc-ffb22ba41ba6 req-95feb6fc-d19e-4ea2-975d-b6b3018519a0 service nova] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Refreshing network info cache for port 49005889-bfb0-4cb2-aecc-2e83d8b90c66 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 609.233687] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-924ed315-bfed-4f87-8061-1d0d3fcba7eb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.243439] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-812ec739-a072-41da-bf5b-8cfbccf41607 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.269573] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b0343f07-0539-4395-81c8-46ca1f2a8920 could not be found. [ 609.269573] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 609.269573] env[61629]: INFO nova.compute.manager [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Took 0.04 seconds to destroy the instance on the hypervisor. [ 609.269573] env[61629]: DEBUG oslo.service.loopingcall [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 609.269573] env[61629]: DEBUG nova.compute.manager [-] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 609.269955] env[61629]: DEBUG nova.network.neutron [-] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 609.295578] env[61629]: DEBUG nova.network.neutron [-] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 609.516609] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.448s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 609.517736] env[61629]: DEBUG nova.compute.manager [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 609.520318] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 31.731s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.520499] env[61629]: DEBUG nova.objects.instance [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61629) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 609.752873] env[61629]: DEBUG nova.network.neutron [req-c07b8446-124c-4f71-8adc-ffb22ba41ba6 req-95feb6fc-d19e-4ea2-975d-b6b3018519a0 service nova] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 609.798031] env[61629]: DEBUG nova.network.neutron [-] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.825166] env[61629]: DEBUG nova.network.neutron [req-c07b8446-124c-4f71-8adc-ffb22ba41ba6 req-95feb6fc-d19e-4ea2-975d-b6b3018519a0 service nova] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.025122] env[61629]: DEBUG nova.compute.utils [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 610.026748] env[61629]: DEBUG nova.compute.manager [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 610.027064] env[61629]: DEBUG nova.network.neutron [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 610.076947] env[61629]: DEBUG nova.policy [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a1c079b4e8f4a2ea94ed8c18ccbe4ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6855531410a74844adde963459cfb99e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 610.301956] env[61629]: INFO nova.compute.manager [-] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Took 1.03 seconds to deallocate network for instance. 
[ 610.306201] env[61629]: DEBUG nova.compute.claims [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 610.306401] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.328231] env[61629]: DEBUG oslo_concurrency.lockutils [req-c07b8446-124c-4f71-8adc-ffb22ba41ba6 req-95feb6fc-d19e-4ea2-975d-b6b3018519a0 service nova] Releasing lock "refresh_cache-b0343f07-0539-4395-81c8-46ca1f2a8920" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 610.328491] env[61629]: DEBUG nova.compute.manager [req-c07b8446-124c-4f71-8adc-ffb22ba41ba6 req-95feb6fc-d19e-4ea2-975d-b6b3018519a0 service nova] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Received event network-vif-deleted-49005889-bfb0-4cb2-aecc-2e83d8b90c66 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 610.349562] env[61629]: DEBUG nova.network.neutron [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Successfully created port: fedaf39a-b596-4e1d-944b-71449da0184a {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 610.529619] env[61629]: DEBUG nova.compute.manager [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 610.534808] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8184eb22-a055-4801-80af-47a9466b6520 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.536674] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.655s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.537018] env[61629]: DEBUG nova.objects.instance [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Lazy-loading 'resources' on Instance uuid 33029a57-19d2-45eb-b4ec-f50c47d3dc12 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 611.177206] env[61629]: DEBUG nova.compute.manager [req-78503d36-e3a6-4318-82fc-cae26b6d874a req-ebfb1ccc-ac6a-4fe8-99ac-cf6791b7757b service nova] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Received event network-changed-fedaf39a-b596-4e1d-944b-71449da0184a {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 611.177403] env[61629]: DEBUG nova.compute.manager [req-78503d36-e3a6-4318-82fc-cae26b6d874a req-ebfb1ccc-ac6a-4fe8-99ac-cf6791b7757b service nova] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Refreshing instance network info cache due to event network-changed-fedaf39a-b596-4e1d-944b-71449da0184a. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 611.177614] env[61629]: DEBUG oslo_concurrency.lockutils [req-78503d36-e3a6-4318-82fc-cae26b6d874a req-ebfb1ccc-ac6a-4fe8-99ac-cf6791b7757b service nova] Acquiring lock "refresh_cache-b5625b76-37e3-49be-bd3b-8b864021dbd1" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.177752] env[61629]: DEBUG oslo_concurrency.lockutils [req-78503d36-e3a6-4318-82fc-cae26b6d874a req-ebfb1ccc-ac6a-4fe8-99ac-cf6791b7757b service nova] Acquired lock "refresh_cache-b5625b76-37e3-49be-bd3b-8b864021dbd1" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.177905] env[61629]: DEBUG nova.network.neutron [req-78503d36-e3a6-4318-82fc-cae26b6d874a req-ebfb1ccc-ac6a-4fe8-99ac-cf6791b7757b service nova] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Refreshing network info cache for port fedaf39a-b596-4e1d-944b-71449da0184a {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 611.365500] env[61629]: ERROR nova.compute.manager [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fedaf39a-b596-4e1d-944b-71449da0184a, please check neutron logs for more information. 
[ 611.365500] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 611.365500] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 611.365500] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 611.365500] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 611.365500] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 611.365500] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 611.365500] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 611.365500] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 611.365500] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 611.365500] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 611.365500] env[61629]: ERROR nova.compute.manager raise self.value [ 611.365500] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 611.365500] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 611.365500] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 611.365500] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 611.366028] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 611.366028] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 611.366028] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fedaf39a-b596-4e1d-944b-71449da0184a, please check neutron logs for more information. 
[ 611.366028] env[61629]: ERROR nova.compute.manager [ 611.366028] env[61629]: Traceback (most recent call last): [ 611.366028] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 611.366028] env[61629]: listener.cb(fileno) [ 611.366028] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 611.366028] env[61629]: result = function(*args, **kwargs) [ 611.366028] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 611.366028] env[61629]: return func(*args, **kwargs) [ 611.366028] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 611.366028] env[61629]: raise e [ 611.366028] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 611.366028] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 611.366028] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 611.366028] env[61629]: created_port_ids = self._update_ports_for_instance( [ 611.366028] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 611.366028] env[61629]: with excutils.save_and_reraise_exception(): [ 611.366028] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 611.366028] env[61629]: self.force_reraise() [ 611.366028] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 611.366028] env[61629]: raise self.value [ 611.366028] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 611.366028] env[61629]: updated_port = self._update_port( [ 611.366028] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 611.366028] env[61629]: _ensure_no_port_binding_failure(port) [ 611.366028] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 611.366028] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 611.366995] env[61629]: nova.exception.PortBindingFailed: Binding failed for port fedaf39a-b596-4e1d-944b-71449da0184a, please check neutron logs for more information. 
[ 611.366995] env[61629]: Removing descriptor: 15 [ 611.482231] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dedadfd6-1783-4e3f-bb78-ebc419a006a6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.490794] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8e4f7d1-0957-4169-b878-c8efc1c717a1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.519687] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dda8e50-d3f1-4272-9438-b7fafeadb5b4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.526841] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a2dc0eb-b73b-4ac3-be1a-4e02c1677e3c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.539621] env[61629]: DEBUG nova.compute.provider_tree [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 611.549075] env[61629]: DEBUG nova.compute.manager [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 611.574263] env[61629]: DEBUG nova.virt.hardware [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 611.574523] env[61629]: DEBUG nova.virt.hardware [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 611.574679] env[61629]: DEBUG nova.virt.hardware [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 611.574858] env[61629]: DEBUG nova.virt.hardware [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 611.574999] env[61629]: DEBUG nova.virt.hardware [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 611.575164] env[61629]: DEBUG nova.virt.hardware [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 611.575367] env[61629]: DEBUG nova.virt.hardware [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 611.575523] env[61629]: DEBUG nova.virt.hardware [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 611.575686] env[61629]: DEBUG nova.virt.hardware [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 611.575843] env[61629]: DEBUG nova.virt.hardware [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 611.576022] env[61629]: DEBUG nova.virt.hardware [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 611.576860] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d045cfb5-5572-4687-a3ce-064fd4a733d5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.586068] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceffb29e-87c9-45f4-bd4e-1e735dcc9fd4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.599114] env[61629]: ERROR nova.compute.manager [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fedaf39a-b596-4e1d-944b-71449da0184a, please check neutron logs for more information. 
[ 611.599114] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Traceback (most recent call last): [ 611.599114] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 611.599114] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] yield resources [ 611.599114] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 611.599114] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] self.driver.spawn(context, instance, image_meta, [ 611.599114] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 611.599114] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 611.599114] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 611.599114] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] vm_ref = self.build_virtual_machine(instance, [ 611.599114] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 611.599485] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] vif_infos = vmwarevif.get_vif_info(self._session, [ 611.599485] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 611.599485] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] for vif in network_info: [ 611.599485] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 611.599485] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] return self._sync_wrapper(fn, *args, **kwargs) [ 611.599485] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 611.599485] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] self.wait() [ 611.599485] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 611.599485] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] self[:] = self._gt.wait() [ 611.599485] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 611.599485] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] return self._exit_event.wait() [ 611.599485] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 611.599485] env[61629]: ERROR 
nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] current.throw(*self._exc) [ 611.599851] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 611.599851] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] result = function(*args, **kwargs) [ 611.599851] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 611.599851] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] return func(*args, **kwargs) [ 611.599851] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 611.599851] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] raise e [ 611.599851] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 611.599851] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] nwinfo = self.network_api.allocate_for_instance( [ 611.599851] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 611.599851] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] created_port_ids = self._update_ports_for_instance( [ 611.599851] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 611.599851] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] with excutils.save_and_reraise_exception(): [ 611.599851] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 611.600177] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] self.force_reraise() [ 611.600177] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 611.600177] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] raise self.value [ 611.600177] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 611.600177] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] updated_port = self._update_port( [ 611.600177] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 611.600177] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] _ensure_no_port_binding_failure(port) [ 611.600177] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
611.600177] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] raise exception.PortBindingFailed(port_id=port['id']) [ 611.600177] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] nova.exception.PortBindingFailed: Binding failed for port fedaf39a-b596-4e1d-944b-71449da0184a, please check neutron logs for more information. [ 611.600177] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] [ 611.600177] env[61629]: INFO nova.compute.manager [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Terminating instance [ 611.601445] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Acquiring lock "refresh_cache-b5625b76-37e3-49be-bd3b-8b864021dbd1" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.699106] env[61629]: DEBUG nova.network.neutron [req-78503d36-e3a6-4318-82fc-cae26b6d874a req-ebfb1ccc-ac6a-4fe8-99ac-cf6791b7757b service nova] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 611.779615] env[61629]: DEBUG nova.network.neutron [req-78503d36-e3a6-4318-82fc-cae26b6d874a req-ebfb1ccc-ac6a-4fe8-99ac-cf6791b7757b service nova] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.043398] env[61629]: DEBUG nova.scheduler.client.report [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 612.282316] env[61629]: DEBUG oslo_concurrency.lockutils [req-78503d36-e3a6-4318-82fc-cae26b6d874a req-ebfb1ccc-ac6a-4fe8-99ac-cf6791b7757b service nova] Releasing lock "refresh_cache-b5625b76-37e3-49be-bd3b-8b864021dbd1" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 612.282766] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Acquired lock "refresh_cache-b5625b76-37e3-49be-bd3b-8b864021dbd1" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.282952] env[61629]: DEBUG nova.network.neutron [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: 
b5625b76-37e3-49be-bd3b-8b864021dbd1] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 612.549756] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.013s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.552214] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.610s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.552411] env[61629]: DEBUG nova.objects.instance [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61629) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 612.572121] env[61629]: INFO nova.scheduler.client.report [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Deleted allocations for instance 33029a57-19d2-45eb-b4ec-f50c47d3dc12 [ 612.801041] env[61629]: DEBUG nova.network.neutron [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 612.897594] env[61629]: DEBUG nova.network.neutron [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.079298] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2401566-8e4b-4267-b3e9-c289bc1801e1 tempest-ServerShowV254Test-809676224 tempest-ServerShowV254Test-809676224-project-member] Lock "33029a57-19d2-45eb-b4ec-f50c47d3dc12" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.933s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.202151] env[61629]: DEBUG nova.compute.manager [req-8cc15728-0162-4daf-a4ba-ec604454b29e req-6db952b5-596d-4165-a64a-11814b48b772 service nova] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Received event network-vif-deleted-fedaf39a-b596-4e1d-944b-71449da0184a {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 613.400551] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Releasing lock "refresh_cache-b5625b76-37e3-49be-bd3b-8b864021dbd1" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.401098] env[61629]: DEBUG nova.compute.manager [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 613.401132] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 613.401469] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-860e97c3-28a3-4c8a-a001-bfb13680ded2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.410692] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b67044-feae-40dc-af1d-cbeaa02be82c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.431977] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b5625b76-37e3-49be-bd3b-8b864021dbd1 could not be found. 
[ 613.431977] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 613.432126] env[61629]: INFO nova.compute.manager [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Took 0.03 seconds to destroy the instance on the hypervisor. [ 613.432510] env[61629]: DEBUG oslo.service.loopingcall [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 613.432671] env[61629]: DEBUG nova.compute.manager [-] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 613.432716] env[61629]: DEBUG nova.network.neutron [-] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 613.461379] env[61629]: DEBUG nova.network.neutron [-] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 613.560617] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0a81c568-1eb0-4f1a-aafc-100ac30ca450 tempest-ServersAdmin275Test-1749339799 tempest-ServersAdmin275Test-1749339799-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.564659] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.139s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.565611] env[61629]: DEBUG nova.objects.instance [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Lazy-loading 'resources' on Instance uuid 733343f7-99e2-4e07-94eb-1b66458d799a {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 613.964319] env[61629]: DEBUG nova.network.neutron [-] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.466674] env[61629]: INFO nova.compute.manager [-] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Took 1.03 seconds to deallocate network for instance. 
[ 614.468882] env[61629]: DEBUG nova.compute.claims [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 614.469241] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.473433] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c92751-01bf-4b57-bfbe-0abfd6cb764f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.484021] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-137267fa-dc46-407d-90db-767173caaa0c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.512046] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63cd48d0-6d32-425a-a61e-11af9daf98cd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.519708] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84231280-70aa-42a8-8096-e650a605f2c6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.532890] env[61629]: DEBUG nova.compute.provider_tree [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 615.036344] env[61629]: DEBUG nova.scheduler.client.report [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 615.545021] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.981s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 615.545021] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 
tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.839s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.574821] env[61629]: INFO nova.scheduler.client.report [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Deleted allocations for instance 733343f7-99e2-4e07-94eb-1b66458d799a [ 616.086461] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0e35128c-006f-471b-8a55-027d94cbdc88 tempest-ServersAdmin275Test-881078219 tempest-ServersAdmin275Test-881078219-project-member] Lock "733343f7-99e2-4e07-94eb-1b66458d799a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.483s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.555873] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda42e9e-5ec6-4cdd-8f4b-bef7d6d94483 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.563977] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620307b7-9e37-47b6-a7fd-f1f43e139555 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.594790] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb33bc9-1968-4a21-8de7-084e44c943cc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.602852] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af76c25-1682-46cb-a5ab-a21e03b4a691 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.616990] env[61629]: DEBUG nova.compute.provider_tree [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 617.121030] env[61629]: DEBUG nova.scheduler.client.report [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 617.629024] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.082s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.629024] env[61629]: ERROR nova.compute.manager [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 02865c61-ae10-4f73-b3a1-5027b2e9f76f, please check neutron logs for more information. [ 617.629024] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Traceback (most recent call last): [ 617.629024] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 617.629024] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] self.driver.spawn(context, instance, image_meta, [ 617.629024] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 617.629024] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 617.629024] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 617.629024] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] vm_ref = self.build_virtual_machine(instance, [ 617.629330] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 617.629330] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] vif_infos = vmwarevif.get_vif_info(self._session, [ 617.629330] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 617.629330] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] for vif in network_info: [ 617.629330] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 617.629330] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] return self._sync_wrapper(fn, *args, **kwargs) [ 617.629330] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 617.629330] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] self.wait() [ 617.629330] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 617.629330] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] self[:] = self._gt.wait() [ 617.629330] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 617.629330] env[61629]: ERROR 
nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] return self._exit_event.wait() [ 617.629330] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 617.629664] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] result = hub.switch() [ 617.629664] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 617.629664] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] return self.greenlet.switch() [ 617.629664] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 617.629664] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] result = function(*args, **kwargs) [ 617.629664] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 617.629664] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] return func(*args, **kwargs) [ 617.629664] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 617.629664] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] raise e [ 617.629664] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 617.629664] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] nwinfo = self.network_api.allocate_for_instance( [ 617.629664] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 617.629664] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] created_port_ids = self._update_ports_for_instance( [ 617.629975] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 617.629975] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] with excutils.save_and_reraise_exception(): [ 617.629975] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 617.629975] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] self.force_reraise() [ 617.629975] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 617.629975] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] raise self.value [ 617.629975] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance 
[ 617.629975] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] updated_port = self._update_port( [ 617.629975] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 617.629975] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] _ensure_no_port_binding_failure(port) [ 617.629975] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 617.629975] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] raise exception.PortBindingFailed(port_id=port['id']) [ 617.630292] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] nova.exception.PortBindingFailed: Binding failed for port 02865c61-ae10-4f73-b3a1-5027b2e9f76f, please check neutron logs for more information. [ 617.630292] env[61629]: ERROR nova.compute.manager [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] [ 617.630292] env[61629]: DEBUG nova.compute.utils [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Binding failed for port 02865c61-ae10-4f73-b3a1-5027b2e9f76f, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 617.631113] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.571s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.634929] env[61629]: DEBUG nova.compute.manager [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Build of instance 62d7c997-cd38-43f5-a571-78a055ad05f7 was re-scheduled: Binding failed for port 02865c61-ae10-4f73-b3a1-5027b2e9f76f, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 617.636059] env[61629]: DEBUG nova.compute.manager [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 617.636485] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "refresh_cache-62d7c997-cd38-43f5-a571-78a055ad05f7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 617.637387] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquired lock "refresh_cache-62d7c997-cd38-43f5-a571-78a055ad05f7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.637387] env[61629]: DEBUG nova.network.neutron [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 618.039032] env[61629]: DEBUG oslo_concurrency.lockutils [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquiring lock "ad374170-21a1-4036-9804-b82493701abf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.039440] env[61629]: DEBUG oslo_concurrency.lockutils [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "ad374170-21a1-4036-9804-b82493701abf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.162289] env[61629]: DEBUG nova.network.neutron [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 618.282801] env[61629]: DEBUG nova.network.neutron [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.573907] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4146f9-9916-496d-9f06-3a42cd9f940f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.581628] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667adfd9-000c-400b-9267-fe787a3b294e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.618828] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dfafbc7-19a6-42cb-a636-7cca13bc0063 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.626714] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d96f27-24ec-433d-912b-b44358f39f33 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.641189] env[61629]: DEBUG nova.compute.provider_tree [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.785129] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Releasing lock "refresh_cache-62d7c997-cd38-43f5-a571-78a055ad05f7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 618.785450] env[61629]: DEBUG nova.compute.manager [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 618.785671] env[61629]: DEBUG nova.compute.manager [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 618.785876] env[61629]: DEBUG nova.network.neutron [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 618.801326] env[61629]: DEBUG nova.network.neutron [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 619.144979] env[61629]: DEBUG nova.scheduler.client.report [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 619.305645] env[61629]: DEBUG nova.network.neutron [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.650723] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.021s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 619.651447] env[61629]: ERROR nova.compute.manager [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 255c484c-cf0c-421e-a590-e8b70f204eee, please check neutron logs for more information. 
[ 619.651447] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Traceback (most recent call last): [ 619.651447] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 619.651447] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] self.driver.spawn(context, instance, image_meta, [ 619.651447] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 619.651447] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 619.651447] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 619.651447] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] vm_ref = self.build_virtual_machine(instance, [ 619.651447] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 619.651447] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] vif_infos = vmwarevif.get_vif_info(self._session, [ 619.651447] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 619.651806] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] for vif in network_info: [ 619.651806] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 619.651806] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] return self._sync_wrapper(fn, *args, **kwargs) [ 619.651806] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 619.651806] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] self.wait() [ 619.651806] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 619.651806] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] self[:] = self._gt.wait() [ 619.651806] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 619.651806] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] return self._exit_event.wait() [ 619.651806] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 619.651806] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] result = hub.switch() [ 619.651806] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
619.651806] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] return self.greenlet.switch() [ 619.652080] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 619.652080] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] result = function(*args, **kwargs) [ 619.652080] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 619.652080] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] return func(*args, **kwargs) [ 619.652080] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 619.652080] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] raise e [ 619.652080] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 619.652080] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] nwinfo = self.network_api.allocate_for_instance( [ 619.652080] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 619.652080] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] created_port_ids = self._update_ports_for_instance( [ 619.652080] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 619.652080] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] with excutils.save_and_reraise_exception(): [ 619.652080] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 619.652517] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] self.force_reraise() [ 619.652517] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 619.652517] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] raise self.value [ 619.652517] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 619.652517] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] updated_port = self._update_port( [ 619.652517] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 619.652517] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] _ensure_no_port_binding_failure(port) [ 619.652517] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 619.652517] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] raise exception.PortBindingFailed(port_id=port['id']) [ 619.652517] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] nova.exception.PortBindingFailed: Binding failed for port 255c484c-cf0c-421e-a590-e8b70f204eee, please check neutron logs for more information. [ 619.652517] env[61629]: ERROR nova.compute.manager [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] [ 619.652771] env[61629]: DEBUG nova.compute.utils [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Binding failed for port 255c484c-cf0c-421e-a590-e8b70f204eee, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 619.653655] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.588s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.656799] env[61629]: DEBUG nova.compute.manager [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Build of instance 8d858fe9-1c97-457b-87ba-2d405bb7dcc0 was re-scheduled: Binding failed for port 255c484c-cf0c-421e-a590-e8b70f204eee, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 619.657283] env[61629]: DEBUG nova.compute.manager [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 619.657547] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Acquiring lock "refresh_cache-8d858fe9-1c97-457b-87ba-2d405bb7dcc0" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.657739] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Acquired lock "refresh_cache-8d858fe9-1c97-457b-87ba-2d405bb7dcc0" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.657925] env[61629]: DEBUG nova.network.neutron [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 619.808712] env[61629]: INFO nova.compute.manager [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 62d7c997-cd38-43f5-a571-78a055ad05f7] Took 1.02 seconds to deallocate network for instance. [ 620.178726] env[61629]: DEBUG nova.network.neutron [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 620.276863] env[61629]: DEBUG nova.network.neutron [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.600809] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f9ac94e-bad1-44bb-913b-2184cdb05c42 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.608449] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ffb4d40-96d3-4be1-b5a4-4fd80b02ab88 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.640181] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2928373e-d397-435e-8fe0-9c3c8ee036d9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.647380] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4b1cab-4936-4e45-b082-8eb9b7c1345c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.660621] env[61629]: DEBUG nova.compute.provider_tree [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 620.779922] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Releasing lock "refresh_cache-8d858fe9-1c97-457b-87ba-2d405bb7dcc0" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.779922] env[61629]: DEBUG nova.compute.manager [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 620.779922] env[61629]: DEBUG nova.compute.manager [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 620.780075] env[61629]: DEBUG nova.network.neutron [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 620.803437] env[61629]: DEBUG nova.network.neutron [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 620.846890] env[61629]: INFO nova.scheduler.client.report [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Deleted allocations for instance 62d7c997-cd38-43f5-a571-78a055ad05f7 [ 621.166791] env[61629]: DEBUG nova.scheduler.client.report [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 621.308734] env[61629]: DEBUG nova.network.neutron [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.355744] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e911ba70-9b72-488b-80b3-4f10c60fa725 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "62d7c997-cd38-43f5-a571-78a055ad05f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.622s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.672081] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.018s {{(pid=61629) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.672849] env[61629]: ERROR nova.compute.manager [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2288edb5-249b-4290-b09b-0a4321f47a5a, please check neutron logs for more information. [ 621.672849] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Traceback (most recent call last): [ 621.672849] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 621.672849] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] self.driver.spawn(context, instance, image_meta, [ 621.672849] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 621.672849] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 621.672849] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 621.672849] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] vm_ref = self.build_virtual_machine(instance, [ 621.672849] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 621.672849] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] vif_infos = vmwarevif.get_vif_info(self._session, [ 621.672849] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 621.673122] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] for vif in network_info: [ 621.673122] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 621.673122] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] return self._sync_wrapper(fn, *args, **kwargs) [ 621.673122] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 621.673122] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] self.wait() [ 621.673122] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 621.673122] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] self[:] = self._gt.wait() [ 621.673122] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 621.673122] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] return self._exit_event.wait() [ 621.673122] 
env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 621.673122] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] result = hub.switch() [ 621.673122] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 621.673122] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] return self.greenlet.switch() [ 621.673389] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 621.673389] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] result = function(*args, **kwargs) [ 621.673389] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 621.673389] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] return func(*args, **kwargs) [ 621.673389] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 621.673389] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] raise e [ 621.673389] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 621.673389] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] nwinfo = self.network_api.allocate_for_instance( [ 621.673389] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 621.673389] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] created_port_ids = self._update_ports_for_instance( [ 621.673389] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 621.673389] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] with excutils.save_and_reraise_exception(): [ 621.673389] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 621.673689] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] self.force_reraise() [ 621.673689] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 621.673689] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] raise self.value [ 621.673689] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 621.673689] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] updated_port 
= self._update_port( [ 621.673689] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 621.673689] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] _ensure_no_port_binding_failure(port) [ 621.673689] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 621.673689] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] raise exception.PortBindingFailed(port_id=port['id']) [ 621.673689] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] nova.exception.PortBindingFailed: Binding failed for port 2288edb5-249b-4290-b09b-0a4321f47a5a, please check neutron logs for more information. [ 621.673689] env[61629]: ERROR nova.compute.manager [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] [ 621.673996] env[61629]: DEBUG nova.compute.utils [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Binding failed for port 2288edb5-249b-4290-b09b-0a4321f47a5a, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 621.674889] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.442s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.678600] env[61629]: DEBUG nova.compute.manager [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Build of instance ce3a7a32-424a-48a4-b5c5-2a25190943f5 was re-scheduled: Binding failed for port 2288edb5-249b-4290-b09b-0a4321f47a5a, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 621.683359] env[61629]: DEBUG nova.compute.manager [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 621.683359] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Acquiring lock "refresh_cache-ce3a7a32-424a-48a4-b5c5-2a25190943f5" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.683359] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Acquired lock "refresh_cache-ce3a7a32-424a-48a4-b5c5-2a25190943f5" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.683359] env[61629]: DEBUG nova.network.neutron [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 621.811360] env[61629]: INFO nova.compute.manager [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] [instance: 8d858fe9-1c97-457b-87ba-2d405bb7dcc0] Took 1.03 seconds to deallocate network for instance. [ 621.857975] env[61629]: DEBUG nova.compute.manager [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 622.207212] env[61629]: DEBUG nova.network.neutron [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 622.332573] env[61629]: DEBUG nova.network.neutron [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.381149] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.672555] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cef182d-6cf1-4aca-8f01-8cd05434287d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.680568] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e252377d-3b65-466c-a34d-21a59437e9de {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.713319] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a42eeb-21ee-4dee-8b4f-b37bb9e835c3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.721377] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f505810-b6f0-4156-a8af-a706b4bcc2b1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.737507] env[61629]: DEBUG nova.compute.provider_tree [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 622.839523] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Releasing lock "refresh_cache-ce3a7a32-424a-48a4-b5c5-2a25190943f5" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 622.839523] env[61629]: DEBUG nova.compute.manager [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 622.839523] env[61629]: DEBUG nova.compute.manager [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 622.839523] env[61629]: DEBUG nova.network.neutron [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 622.854082] env[61629]: INFO nova.scheduler.client.report [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Deleted allocations for instance 8d858fe9-1c97-457b-87ba-2d405bb7dcc0 [ 622.866242] env[61629]: DEBUG nova.network.neutron [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 623.241025] env[61629]: DEBUG nova.scheduler.client.report [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 623.364819] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3aa19b5-8439-4d76-9eb4-c77732c4e172 tempest-VolumesAssistedSnapshotsTest-970386526 tempest-VolumesAssistedSnapshotsTest-970386526-project-member] Lock "8d858fe9-1c97-457b-87ba-2d405bb7dcc0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.957s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.368027] env[61629]: DEBUG nova.network.neutron [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.752153] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.076s {{(pid=61629) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.752153] env[61629]: ERROR nova.compute.manager [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a8217668-74ac-4d0c-811c-4995094be013, please check neutron logs for more information. [ 623.752153] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Traceback (most recent call last): [ 623.752153] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 623.752153] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] self.driver.spawn(context, instance, image_meta, [ 623.752153] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 623.752153] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] self._vmops.spawn(context, instance, image_meta, injected_files, [ 623.752153] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 623.752153] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] vm_ref = self.build_virtual_machine(instance, [ 623.752674] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 623.752674] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] vif_infos = vmwarevif.get_vif_info(self._session, [ 623.752674] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 623.752674] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] for vif in network_info: [ 623.752674] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 623.752674] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] return self._sync_wrapper(fn, *args, **kwargs) [ 623.752674] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 623.752674] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] self.wait() [ 623.752674] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 623.752674] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] self[:] = self._gt.wait() [ 623.752674] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 623.752674] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] return 
self._exit_event.wait() [ 623.752674] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 623.752970] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] result = hub.switch() [ 623.752970] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 623.752970] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] return self.greenlet.switch() [ 623.752970] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 623.752970] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] result = function(*args, **kwargs) [ 623.752970] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 623.752970] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] return func(*args, **kwargs) [ 623.752970] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 623.752970] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] raise e [ 623.752970] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 623.752970] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] nwinfo = self.network_api.allocate_for_instance( [ 623.752970] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 623.752970] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] created_port_ids = self._update_ports_for_instance( [ 623.753298] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 623.753298] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] with excutils.save_and_reraise_exception(): [ 623.753298] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 623.753298] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] self.force_reraise() [ 623.753298] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 623.753298] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] raise self.value [ 623.753298] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 623.753298] env[61629]: ERROR nova.compute.manager [instance: 
cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] updated_port = self._update_port( [ 623.753298] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 623.753298] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] _ensure_no_port_binding_failure(port) [ 623.753298] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 623.753298] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] raise exception.PortBindingFailed(port_id=port['id']) [ 623.753563] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] nova.exception.PortBindingFailed: Binding failed for port a8217668-74ac-4d0c-811c-4995094be013, please check neutron logs for more information. [ 623.753563] env[61629]: ERROR nova.compute.manager [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] [ 623.757339] env[61629]: DEBUG nova.compute.utils [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Binding failed for port a8217668-74ac-4d0c-811c-4995094be013, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 623.759061] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.733s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.763853] env[61629]: DEBUG nova.compute.manager [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Build of instance cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59 was re-scheduled: Binding failed for port a8217668-74ac-4d0c-811c-4995094be013, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 623.764510] env[61629]: DEBUG nova.compute.manager [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 623.764559] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Acquiring lock "refresh_cache-cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.764703] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Acquired lock "refresh_cache-cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.764853] env[61629]: DEBUG nova.network.neutron [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 623.772987] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Acquiring lock "079cb97b-b7d4-4f25-9f1d-f77f34a2efbe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.772987] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Lock "079cb97b-b7d4-4f25-9f1d-f77f34a2efbe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.868017] env[61629]: DEBUG nova.compute.manager [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 623.873697] env[61629]: INFO nova.compute.manager [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] [instance: ce3a7a32-424a-48a4-b5c5-2a25190943f5] Took 1.04 seconds to deallocate network for instance. 
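[editor's note] By this point the same PortBindingFailed / re-schedule / deallocate cycle has repeated for several instances (62d7c997, 8d858fe9, ce3a7a32, cd0af99e). When triaging logs like this, it can help to correlate each failed port with its instance. The hypothetical helper below does that; it relies only on the message shapes visible above ("[instance: <uuid>]" and "Binding failed for port <uuid>") and would need adjusting for other log formats.

    # Illustrative triage script, assuming the log format shown in this file.
    import re
    import sys
    from collections import defaultdict

    INSTANCE_RE = re.compile(r'\[instance: ([0-9a-f-]{36})\]')
    PORT_RE = re.compile(r'Binding failed for port ([0-9a-f-]{36})')

    def binding_failures(lines):
        """Map instance UUID -> set of port UUIDs that failed to bind."""
        failures = defaultdict(set)
        for line in lines:
            inst = INSTANCE_RE.search(line)
            port = PORT_RE.search(line)
            if inst and port:
                failures[inst.group(1)].add(port.group(1))
        return failures

    if __name__ == '__main__':
        # Usage: python3 binding_failures.py < compute.log
        for instance, ports in sorted(binding_failures(sys.stdin).items()):
            print(instance, '->', ', '.join(sorted(ports)))

[end editor's note]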
[ 624.297541] env[61629]: DEBUG nova.network.neutron [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 624.392603] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.563543] env[61629]: DEBUG nova.network.neutron [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.709298] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "443ad254-3d5d-4fb8-a565-ce70c352e3f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.709298] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "443ad254-3d5d-4fb8-a565-ce70c352e3f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.737347] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b05d2f-7e60-4fe5-abfe-dfb4f32f53e3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.745619] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc17923e-37a8-4388-aff9-bcbefe98ce37 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.777099] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aefeec2a-4ec8-45c4-bdfb-9824b3e8d31f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.784710] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b26bd591-b439-4257-aa2f-c55c8add4ea2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.799304] env[61629]: DEBUG nova.compute.provider_tree [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Inventory has not changed in ProviderTree for provider: 
d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 624.918248] env[61629]: INFO nova.scheduler.client.report [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Deleted allocations for instance ce3a7a32-424a-48a4-b5c5-2a25190943f5 [ 625.066687] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Releasing lock "refresh_cache-cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.066947] env[61629]: DEBUG nova.compute.manager [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 625.067147] env[61629]: DEBUG nova.compute.manager [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 625.067317] env[61629]: DEBUG nova.network.neutron [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 625.082591] env[61629]: DEBUG nova.network.neutron [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 625.303269] env[61629]: DEBUG nova.scheduler.client.report [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 625.430248] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b5c059e-2e5c-47e6-a98c-089a09d9f6fe tempest-ImagesOneServerTestJSON-612300628 tempest-ImagesOneServerTestJSON-612300628-project-member] Lock "ce3a7a32-424a-48a4-b5c5-2a25190943f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.481s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.585084] env[61629]: DEBUG nova.network.neutron [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.811167] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.049s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.811167] env[61629]: ERROR nova.compute.manager [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 41caef89-d69f-4c91-b9fd-6e89296aba9c, please check neutron logs for more information. 
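[editor's note] A secondary pattern worth noting above is the contention on the "compute_resources" lock: abort_instance_claim acquisitions report waits of 23.588s, 22.442s and 23.733s while each hold lasts only about 2s. A small, hypothetical helper for pulling those figures out of logs with this oslo.concurrency message shape ('Lock "<name>" acquired by "<target>" :: waited <secs>s') is sketched below; names and output format are the editor's own. The traceback for instance e40e1443 then continues the log.

    # Illustrative sketch for summarising lock wait times in logs like this one.
    import re
    import sys
    from collections import defaultdict

    ACQ_RE = re.compile(
        r'Lock "([^"]+)" acquired by "([^"]+)" :: waited ([0-9.]+)s')

    def lock_waits(lines):
        """Map lock name -> list of (wait_seconds, acquiring_target)."""
        waits = defaultdict(list)
        for line in lines:
            m = ACQ_RE.search(line)
            if m:
                waits[m.group(1)].append((float(m.group(3)), m.group(2)))
        return waits

    if __name__ == '__main__':
        # Usage: python3 lock_waits.py < compute.log
        for name, samples in lock_waits(sys.stdin).items():
            worst_secs, worst_target = max(samples)
            print(f'{name}: {len(samples)} acquisitions, '
                  f'worst wait {worst_secs:.3f}s ({worst_target})')

[end editor's note]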
[ 625.811167] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Traceback (most recent call last): [ 625.811167] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 625.811167] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] self.driver.spawn(context, instance, image_meta, [ 625.811167] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 625.811167] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] self._vmops.spawn(context, instance, image_meta, injected_files, [ 625.811167] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 625.811167] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] vm_ref = self.build_virtual_machine(instance, [ 625.812084] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 625.812084] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] vif_infos = vmwarevif.get_vif_info(self._session, [ 625.812084] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 625.812084] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] for vif in network_info: [ 625.812084] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 625.812084] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] return self._sync_wrapper(fn, *args, **kwargs) [ 625.812084] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 625.812084] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] self.wait() [ 625.812084] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 625.812084] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] self[:] = self._gt.wait() [ 625.812084] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 625.812084] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] return self._exit_event.wait() [ 625.812084] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 625.812575] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] current.throw(*self._exc) [ 625.812575] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
625.812575] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] result = function(*args, **kwargs) [ 625.812575] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 625.812575] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] return func(*args, **kwargs) [ 625.812575] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 625.812575] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] raise e [ 625.812575] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 625.812575] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] nwinfo = self.network_api.allocate_for_instance( [ 625.812575] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 625.812575] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] created_port_ids = self._update_ports_for_instance( [ 625.812575] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 625.812575] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] with excutils.save_and_reraise_exception(): [ 625.820169] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 625.820169] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] self.force_reraise() [ 625.820169] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 625.820169] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] raise self.value [ 625.820169] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 625.820169] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] updated_port = self._update_port( [ 625.820169] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 625.820169] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] _ensure_no_port_binding_failure(port) [ 625.820169] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 625.820169] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] raise exception.PortBindingFailed(port_id=port['id']) [ 625.820169] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] nova.exception.PortBindingFailed: Binding failed for 
port 41caef89-d69f-4c91-b9fd-6e89296aba9c, please check neutron logs for more information. [ 625.820169] env[61629]: ERROR nova.compute.manager [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] [ 625.820666] env[61629]: DEBUG nova.compute.utils [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Binding failed for port 41caef89-d69f-4c91-b9fd-6e89296aba9c, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 625.820666] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.476s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.820666] env[61629]: DEBUG nova.compute.manager [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Build of instance e40e1443-6d5d-41e1-9822-08b782e39d27 was re-scheduled: Binding failed for port 41caef89-d69f-4c91-b9fd-6e89296aba9c, please check neutron logs for more information. {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 625.820666] env[61629]: DEBUG nova.compute.manager [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 625.820793] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Acquiring lock "refresh_cache-e40e1443-6d5d-41e1-9822-08b782e39d27" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.820793] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Acquired lock "refresh_cache-e40e1443-6d5d-41e1-9822-08b782e39d27" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.820793] env[61629]: DEBUG nova.network.neutron [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 625.932066] env[61629]: DEBUG nova.compute.manager [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Starting instance... 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 626.087876] env[61629]: INFO nova.compute.manager [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] [instance: cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59] Took 1.02 seconds to deallocate network for instance. [ 626.342328] env[61629]: DEBUG nova.network.neutron [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 626.414494] env[61629]: DEBUG nova.network.neutron [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.450501] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.787833] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a0993b-e9e5-4f50-a4e7-361f302b74db {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.805199] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3dc691-8e9d-4875-ac0a-f0f4df2dcf72 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.459768] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Releasing lock "refresh_cache-e40e1443-6d5d-41e1-9822-08b782e39d27" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.460038] env[61629]: DEBUG nova.compute.manager [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 627.460183] env[61629]: DEBUG nova.compute.manager [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 627.460344] env[61629]: DEBUG nova.network.neutron [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 627.465253] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3929bf40-338d-430d-8698-26e9ead483f4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.472753] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13ca23c-d56d-41fe-bb7b-4d5794260c0b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.485611] env[61629]: DEBUG nova.compute.provider_tree [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 627.487405] env[61629]: DEBUG nova.network.neutron [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 627.495917] env[61629]: INFO nova.scheduler.client.report [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Deleted allocations for instance cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59 [ 627.993220] env[61629]: DEBUG nova.scheduler.client.report [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 627.994546] env[61629]: DEBUG nova.network.neutron [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.003781] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d45b7d32-d0e8-4fa4-8b57-4a9a28adb9a4 tempest-ServersWithSpecificFlavorTestJSON-2142828875 tempest-ServersWithSpecificFlavorTestJSON-2142828875-project-member] Lock "cd0af99e-0a90-4aa4-80b5-f6a7a3b7ec59" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.920s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.498185] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.686s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.498656] env[61629]: ERROR nova.compute.manager [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c65338f3-f7e6-4be6-8cbb-d35aabc9f788, please check neutron logs for more information. 
[ 628.498656] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Traceback (most recent call last): [ 628.498656] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 628.498656] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] self.driver.spawn(context, instance, image_meta, [ 628.498656] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 628.498656] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 628.498656] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 628.498656] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] vm_ref = self.build_virtual_machine(instance, [ 628.498656] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 628.498656] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] vif_infos = vmwarevif.get_vif_info(self._session, [ 628.498656] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 628.498995] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] for vif in network_info: [ 628.498995] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 628.498995] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] return self._sync_wrapper(fn, *args, **kwargs) [ 628.498995] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 628.498995] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] self.wait() [ 628.498995] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 628.498995] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] self[:] = self._gt.wait() [ 628.498995] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 628.498995] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] return self._exit_event.wait() [ 628.498995] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 628.498995] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] current.throw(*self._exc) [ 628.498995] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
628.498995] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] result = function(*args, **kwargs) [ 628.499351] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 628.499351] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] return func(*args, **kwargs) [ 628.499351] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 628.499351] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] raise e [ 628.499351] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 628.499351] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] nwinfo = self.network_api.allocate_for_instance( [ 628.499351] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 628.499351] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] created_port_ids = self._update_ports_for_instance( [ 628.499351] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 628.499351] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] with excutils.save_and_reraise_exception(): [ 628.499351] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 628.499351] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] self.force_reraise() [ 628.499351] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 628.499707] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] raise self.value [ 628.499707] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 628.499707] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] updated_port = self._update_port( [ 628.499707] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 628.499707] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] _ensure_no_port_binding_failure(port) [ 628.499707] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 628.499707] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] raise exception.PortBindingFailed(port_id=port['id']) [ 628.499707] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] nova.exception.PortBindingFailed: Binding failed for 
port c65338f3-f7e6-4be6-8cbb-d35aabc9f788, please check neutron logs for more information. [ 628.499707] env[61629]: ERROR nova.compute.manager [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] [ 628.499707] env[61629]: DEBUG nova.compute.utils [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Binding failed for port c65338f3-f7e6-4be6-8cbb-d35aabc9f788, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 628.501619] env[61629]: DEBUG oslo_concurrency.lockutils [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.172s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.504680] env[61629]: INFO nova.compute.manager [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] [instance: e40e1443-6d5d-41e1-9822-08b782e39d27] Took 1.04 seconds to deallocate network for instance. [ 628.508134] env[61629]: DEBUG nova.compute.manager [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Build of instance 01c864cd-58a3-4061-836d-6a86ad37e4c4 was re-scheduled: Binding failed for port c65338f3-f7e6-4be6-8cbb-d35aabc9f788, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 628.508605] env[61629]: DEBUG nova.compute.manager [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 628.508862] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Acquiring lock "refresh_cache-01c864cd-58a3-4061-836d-6a86ad37e4c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.509045] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Acquired lock "refresh_cache-01c864cd-58a3-4061-836d-6a86ad37e4c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.509419] env[61629]: DEBUG nova.network.neutron [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 628.510717] env[61629]: DEBUG nova.compute.manager [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 629.039977] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.051978] env[61629]: DEBUG nova.network.neutron [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 629.190603] env[61629]: DEBUG nova.network.neutron [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.459989] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-451a7619-5a7b-4139-a837-151a5f4da955 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.468624] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759a462e-92c5-4005-8c2f-2945a016a3c8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.498776] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f69efb20-5ce7-49fa-bb4d-a0dbbe37b512 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.506401] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbcbeea-5078-4cb2-8953-7771a75cbd4d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.520304] env[61629]: DEBUG nova.compute.provider_tree [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 629.550133] env[61629]: INFO nova.scheduler.client.report [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Deleted allocations for instance e40e1443-6d5d-41e1-9822-08b782e39d27 [ 629.693249] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Releasing lock "refresh_cache-01c864cd-58a3-4061-836d-6a86ad37e4c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 629.693501] env[61629]: DEBUG nova.compute.manager [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 629.693687] env[61629]: DEBUG nova.compute.manager [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 629.693888] env[61629]: DEBUG nova.network.neutron [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 629.712320] env[61629]: DEBUG nova.network.neutron [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 630.025649] env[61629]: DEBUG nova.scheduler.client.report [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 630.061507] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a64d14ff-d2d9-427f-8aaf-b625c8887ce5 tempest-ServerAddressesNegativeTestJSON-267386311 tempest-ServerAddressesNegativeTestJSON-267386311-project-member] Lock "e40e1443-6d5d-41e1-9822-08b782e39d27" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.251s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.215635] env[61629]: DEBUG nova.network.neutron [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.532463] env[61629]: DEBUG oslo_concurrency.lockutils [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.031s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.533164] env[61629]: ERROR nova.compute.manager [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Failed to build 
and run instance: nova.exception.PortBindingFailed: Binding failed for port 1b65b374-ffea-4bbe-8378-6c0e4634c57c, please check neutron logs for more information. [ 630.533164] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Traceback (most recent call last): [ 630.533164] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 630.533164] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] self.driver.spawn(context, instance, image_meta, [ 630.533164] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 630.533164] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 630.533164] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 630.533164] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] vm_ref = self.build_virtual_machine(instance, [ 630.533164] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 630.533164] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] vif_infos = vmwarevif.get_vif_info(self._session, [ 630.533164] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 630.533533] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] for vif in network_info: [ 630.533533] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 630.533533] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] return self._sync_wrapper(fn, *args, **kwargs) [ 630.533533] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 630.533533] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] self.wait() [ 630.533533] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 630.533533] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] self[:] = self._gt.wait() [ 630.533533] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 630.533533] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] return self._exit_event.wait() [ 630.533533] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 630.533533] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] result = hub.switch() [ 630.533533] env[61629]: ERROR 
nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 630.533533] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] return self.greenlet.switch() [ 630.533959] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 630.533959] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] result = function(*args, **kwargs) [ 630.533959] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 630.533959] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] return func(*args, **kwargs) [ 630.533959] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 630.533959] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] raise e [ 630.533959] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 630.533959] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] nwinfo = self.network_api.allocate_for_instance( [ 630.533959] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 630.533959] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] created_port_ids = self._update_ports_for_instance( [ 630.533959] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 630.533959] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] with excutils.save_and_reraise_exception(): [ 630.533959] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 630.534378] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] self.force_reraise() [ 630.534378] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 630.534378] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] raise self.value [ 630.534378] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 630.534378] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] updated_port = self._update_port( [ 630.534378] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 630.534378] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] _ensure_no_port_binding_failure(port) 
[ 630.534378] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 630.534378] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] raise exception.PortBindingFailed(port_id=port['id']) [ 630.534378] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] nova.exception.PortBindingFailed: Binding failed for port 1b65b374-ffea-4bbe-8378-6c0e4634c57c, please check neutron logs for more information. [ 630.534378] env[61629]: ERROR nova.compute.manager [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] [ 630.534731] env[61629]: DEBUG nova.compute.utils [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Binding failed for port 1b65b374-ffea-4bbe-8378-6c0e4634c57c, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 630.535275] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.820s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.538180] env[61629]: DEBUG nova.compute.manager [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Build of instance 9b950dc9-d79c-4b30-8b71-1910b46ffd9b was re-scheduled: Binding failed for port 1b65b374-ffea-4bbe-8378-6c0e4634c57c, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 630.538669] env[61629]: DEBUG nova.compute.manager [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 630.539040] env[61629]: DEBUG oslo_concurrency.lockutils [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Acquiring lock "refresh_cache-9b950dc9-d79c-4b30-8b71-1910b46ffd9b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 630.539234] env[61629]: DEBUG oslo_concurrency.lockutils [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Acquired lock "refresh_cache-9b950dc9-d79c-4b30-8b71-1910b46ffd9b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.539447] env[61629]: DEBUG nova.network.neutron [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 630.563915] env[61629]: DEBUG nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 630.722140] env[61629]: INFO nova.compute.manager [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] [instance: 01c864cd-58a3-4061-836d-6a86ad37e4c4] Took 1.03 seconds to deallocate network for instance. [ 631.078779] env[61629]: DEBUG nova.network.neutron [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 631.088136] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.299106] env[61629]: DEBUG nova.network.neutron [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.529303] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1559c9d9-3033-4fc2-b412-ede1a2b80e00 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.541032] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4181865-f339-440d-b969-7e26acb4e374 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.571982] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54799f53-e1dd-43c0-9353-a7077d727f24 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.580064] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c29071d-dbc9-45b1-872c-abed3dbf4e0f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.595497] env[61629]: DEBUG nova.compute.provider_tree [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 631.771188] env[61629]: INFO nova.scheduler.client.report [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Deleted allocations for instance 01c864cd-58a3-4061-836d-6a86ad37e4c4 [ 631.802223] env[61629]: DEBUG oslo_concurrency.lockutils [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Releasing lock "refresh_cache-9b950dc9-d79c-4b30-8b71-1910b46ffd9b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 631.802556] env[61629]: DEBUG nova.compute.manager [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 631.803020] env[61629]: DEBUG nova.compute.manager [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 631.803020] env[61629]: DEBUG nova.network.neutron [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 631.828923] env[61629]: DEBUG nova.network.neutron [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 632.100495] env[61629]: DEBUG nova.scheduler.client.report [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 632.281429] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8670f996-2d52-4d4b-9c5f-d6c753754f9f tempest-ServerActionsTestJSON-674021798 tempest-ServerActionsTestJSON-674021798-project-member] Lock "01c864cd-58a3-4061-836d-6a86ad37e4c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.960s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 632.332135] env[61629]: DEBUG nova.network.neutron [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.606773] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.071s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 632.607406] env[61629]: ERROR nova.compute.manager [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: 
b2ec37a4-09f6-428c-bca9-1ec121c9c390] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7f4923f5-c864-45fa-8b34-99fc014ec84d, please check neutron logs for more information. [ 632.607406] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Traceback (most recent call last): [ 632.607406] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 632.607406] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] self.driver.spawn(context, instance, image_meta, [ 632.607406] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 632.607406] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] self._vmops.spawn(context, instance, image_meta, injected_files, [ 632.607406] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 632.607406] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] vm_ref = self.build_virtual_machine(instance, [ 632.607406] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 632.607406] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] vif_infos = vmwarevif.get_vif_info(self._session, [ 632.607406] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 632.608297] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] for vif in network_info: [ 632.608297] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 632.608297] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] return self._sync_wrapper(fn, *args, **kwargs) [ 632.608297] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 632.608297] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] self.wait() [ 632.608297] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 632.608297] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] self[:] = self._gt.wait() [ 632.608297] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 632.608297] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] return self._exit_event.wait() [ 632.608297] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 632.608297] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] 
current.throw(*self._exc) [ 632.608297] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 632.608297] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] result = function(*args, **kwargs) [ 632.609132] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 632.609132] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] return func(*args, **kwargs) [ 632.609132] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 632.609132] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] raise e [ 632.609132] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 632.609132] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] nwinfo = self.network_api.allocate_for_instance( [ 632.609132] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 632.609132] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] created_port_ids = self._update_ports_for_instance( [ 632.609132] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 632.609132] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] with excutils.save_and_reraise_exception(): [ 632.609132] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 632.609132] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] self.force_reraise() [ 632.609132] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 632.609717] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] raise self.value [ 632.609717] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 632.609717] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] updated_port = self._update_port( [ 632.609717] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 632.609717] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] _ensure_no_port_binding_failure(port) [ 632.609717] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 632.609717] env[61629]: ERROR nova.compute.manager [instance: 
b2ec37a4-09f6-428c-bca9-1ec121c9c390] raise exception.PortBindingFailed(port_id=port['id']) [ 632.609717] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] nova.exception.PortBindingFailed: Binding failed for port 7f4923f5-c864-45fa-8b34-99fc014ec84d, please check neutron logs for more information. [ 632.609717] env[61629]: ERROR nova.compute.manager [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] [ 632.609717] env[61629]: DEBUG nova.compute.utils [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Binding failed for port 7f4923f5-c864-45fa-8b34-99fc014ec84d, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 632.611588] env[61629]: DEBUG nova.compute.manager [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Build of instance b2ec37a4-09f6-428c-bca9-1ec121c9c390 was re-scheduled: Binding failed for port 7f4923f5-c864-45fa-8b34-99fc014ec84d, please check neutron logs for more information. {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 632.612174] env[61629]: DEBUG nova.compute.manager [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 632.612249] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Acquiring lock "refresh_cache-b2ec37a4-09f6-428c-bca9-1ec121c9c390" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.612733] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Acquired lock "refresh_cache-b2ec37a4-09f6-428c-bca9-1ec121c9c390" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.612733] env[61629]: DEBUG nova.network.neutron [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 632.613676] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.307s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 632.792450] env[61629]: DEBUG nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 
tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 632.835372] env[61629]: INFO nova.compute.manager [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] [instance: 9b950dc9-d79c-4b30-8b71-1910b46ffd9b] Took 1.03 seconds to deallocate network for instance. [ 633.142580] env[61629]: DEBUG nova.network.neutron [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.204167] env[61629]: DEBUG nova.network.neutron [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.312382] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.550490] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85ee120-db19-4001-bb66-9f4d042e03aa {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.557337] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e8db53f-4edf-41c8-bc48-931f35cacd5d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.589401] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a847a27e-c61b-4421-85d1-f1c1544b388b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.597296] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-638802c8-31ae-4416-ba6d-47baec2244bb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.612409] env[61629]: DEBUG nova.compute.provider_tree [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 633.707373] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Releasing lock "refresh_cache-b2ec37a4-09f6-428c-bca9-1ec121c9c390" {{(pid=61629) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 633.708022] env[61629]: DEBUG nova.compute.manager [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 633.708445] env[61629]: DEBUG nova.compute.manager [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 633.708802] env[61629]: DEBUG nova.network.neutron [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 633.728171] env[61629]: DEBUG nova.network.neutron [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.871963] env[61629]: INFO nova.scheduler.client.report [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Deleted allocations for instance 9b950dc9-d79c-4b30-8b71-1910b46ffd9b [ 634.120469] env[61629]: DEBUG nova.scheduler.client.report [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 634.230699] env[61629]: DEBUG nova.network.neutron [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.383348] env[61629]: DEBUG oslo_concurrency.lockutils [None req-18094c97-b0fa-4f34-b7f2-cb7cfe910598 tempest-ServersV294TestFqdnHostnames-1477841666 tempest-ServersV294TestFqdnHostnames-1477841666-project-member] Lock "9b950dc9-d79c-4b30-8b71-1910b46ffd9b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.700s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.629321] 
env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.015s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.630140] env[61629]: ERROR nova.compute.manager [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 49005889-bfb0-4cb2-aecc-2e83d8b90c66, please check neutron logs for more information. [ 634.630140] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Traceback (most recent call last): [ 634.630140] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 634.630140] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] self.driver.spawn(context, instance, image_meta, [ 634.630140] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 634.630140] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] self._vmops.spawn(context, instance, image_meta, injected_files, [ 634.630140] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 634.630140] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] vm_ref = self.build_virtual_machine(instance, [ 634.630140] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 634.630140] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] vif_infos = vmwarevif.get_vif_info(self._session, [ 634.630140] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 634.630420] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] for vif in network_info: [ 634.630420] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 634.630420] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] return self._sync_wrapper(fn, *args, **kwargs) [ 634.630420] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 634.630420] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] self.wait() [ 634.630420] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 634.630420] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] self[:] = self._gt.wait() [ 
634.630420] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 634.630420] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] return self._exit_event.wait() [ 634.630420] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 634.630420] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] current.throw(*self._exc) [ 634.630420] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 634.630420] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] result = function(*args, **kwargs) [ 634.630746] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 634.630746] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] return func(*args, **kwargs) [ 634.630746] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 634.630746] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] raise e [ 634.630746] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 634.630746] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] nwinfo = self.network_api.allocate_for_instance( [ 634.630746] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 634.630746] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] created_port_ids = self._update_ports_for_instance( [ 634.630746] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 634.630746] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] with excutils.save_and_reraise_exception(): [ 634.630746] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 634.630746] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] self.force_reraise() [ 634.630746] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 634.631058] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] raise self.value [ 634.631058] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 634.631058] env[61629]: ERROR nova.compute.manager [instance: 
b0343f07-0539-4395-81c8-46ca1f2a8920] updated_port = self._update_port( [ 634.631058] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 634.631058] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] _ensure_no_port_binding_failure(port) [ 634.631058] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 634.631058] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] raise exception.PortBindingFailed(port_id=port['id']) [ 634.631058] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] nova.exception.PortBindingFailed: Binding failed for port 49005889-bfb0-4cb2-aecc-2e83d8b90c66, please check neutron logs for more information. [ 634.631058] env[61629]: ERROR nova.compute.manager [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] [ 634.632948] env[61629]: DEBUG nova.compute.utils [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Binding failed for port 49005889-bfb0-4cb2-aecc-2e83d8b90c66, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 634.634070] env[61629]: DEBUG nova.compute.manager [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Build of instance b0343f07-0539-4395-81c8-46ca1f2a8920 was re-scheduled: Binding failed for port 49005889-bfb0-4cb2-aecc-2e83d8b90c66, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 634.634520] env[61629]: DEBUG nova.compute.manager [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 634.634771] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Acquiring lock "refresh_cache-b0343f07-0539-4395-81c8-46ca1f2a8920" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.634887] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Acquired lock "refresh_cache-b0343f07-0539-4395-81c8-46ca1f2a8920" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.635069] env[61629]: DEBUG nova.network.neutron [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 634.636573] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.167s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.737548] env[61629]: INFO nova.compute.manager [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] [instance: b2ec37a4-09f6-428c-bca9-1ec121c9c390] Took 1.03 seconds to deallocate network for instance. [ 634.886491] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 635.157219] env[61629]: DEBUG nova.network.neutron [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 635.269247] env[61629]: DEBUG nova.network.neutron [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.351061] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Acquiring lock "d013c1e1-952a-4b76-a44d-8499f5159c42" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 635.351313] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Lock "d013c1e1-952a-4b76-a44d-8499f5159c42" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.406650] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 635.565431] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389b24f3-c654-483f-8137-95d5cbe6d0c2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.573646] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deea6f4e-5adb-413a-8768-3c08561197d7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.603548] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-272748d0-cfeb-40e2-875d-0b22aec5ceae {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.611704] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d4531d-5206-4286-979b-5837ff95daaa {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.628329] env[61629]: DEBUG nova.compute.provider_tree [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 635.775372] env[61629]: INFO nova.scheduler.client.report [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 
tempest-ServerExternalEventsTest-2128929015-project-member] Deleted allocations for instance b2ec37a4-09f6-428c-bca9-1ec121c9c390 [ 635.791654] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Releasing lock "refresh_cache-b0343f07-0539-4395-81c8-46ca1f2a8920" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.791654] env[61629]: DEBUG nova.compute.manager [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 635.791654] env[61629]: DEBUG nova.compute.manager [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 635.791654] env[61629]: DEBUG nova.network.neutron [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 635.819776] env[61629]: DEBUG nova.network.neutron [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 636.130720] env[61629]: DEBUG nova.scheduler.client.report [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 636.289061] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e5b8961c-5271-431b-80c2-e65b94b1d503 tempest-ServerExternalEventsTest-2128929015 tempest-ServerExternalEventsTest-2128929015-project-member] Lock "b2ec37a4-09f6-428c-bca9-1ec121c9c390" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.625s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.324533] env[61629]: DEBUG nova.network.neutron [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.636709] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.637349] env[61629]: ERROR nova.compute.manager [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fedaf39a-b596-4e1d-944b-71449da0184a, please check neutron logs for more information. 
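The traceback that follows for instance b5625b76-37e3-49be-bd3b-8b864021dbd1 (like the two before it for b2ec37a4-09f6-428c-bca9-1ec121c9c390 and b0343f07-0539-4395-81c8-46ca1f2a8920) bottoms out in eventlet rather than in Nova itself: _allocate_network_async runs in a greenthread, and the PortBindingFailed raised there is stored and only re-raised once the spawn path iterates network_info and calls wait() on that thread (the current.throw(*self._exc) frame). A minimal, self-contained sketch of that propagation pattern, assuming only stock eventlet; the function and exception names below are stand-ins, not Nova code:

    import eventlet


    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed (illustration only)."""


    def allocate_network_async(port_id):
        # Pretend Neutron reported a failed binding for this port.
        raise PortBindingFailed(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


    # The exception does not surface here; it is captured by the greenthread.
    gt = eventlet.spawn(allocate_network_async,
                        "7f4923f5-c864-45fa-8b34-99fc014ec84d")

    try:
        gt.wait()  # re-raises the stored exception in the waiting caller
    except PortBindingFailed as exc:
        print(f"caught in caller: {exc}")

This is why the failure is logged from _build_and_run_instance even though it originates in the asynchronous Neutron allocation.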
[ 636.637349] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Traceback (most recent call last): [ 636.637349] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 636.637349] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] self.driver.spawn(context, instance, image_meta, [ 636.637349] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 636.637349] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 636.637349] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 636.637349] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] vm_ref = self.build_virtual_machine(instance, [ 636.637349] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 636.637349] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] vif_infos = vmwarevif.get_vif_info(self._session, [ 636.637349] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 636.637716] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] for vif in network_info: [ 636.637716] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 636.637716] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] return self._sync_wrapper(fn, *args, **kwargs) [ 636.637716] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 636.637716] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] self.wait() [ 636.637716] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 636.637716] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] self[:] = self._gt.wait() [ 636.637716] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 636.637716] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] return self._exit_event.wait() [ 636.637716] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 636.637716] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] current.throw(*self._exc) [ 636.637716] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
636.637716] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] result = function(*args, **kwargs) [ 636.638084] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 636.638084] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] return func(*args, **kwargs) [ 636.638084] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 636.638084] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] raise e [ 636.638084] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 636.638084] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] nwinfo = self.network_api.allocate_for_instance( [ 636.638084] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 636.638084] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] created_port_ids = self._update_ports_for_instance( [ 636.638084] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 636.638084] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] with excutils.save_and_reraise_exception(): [ 636.638084] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 636.638084] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] self.force_reraise() [ 636.638084] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 636.638491] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] raise self.value [ 636.638491] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 636.638491] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] updated_port = self._update_port( [ 636.638491] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 636.638491] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] _ensure_no_port_binding_failure(port) [ 636.638491] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 636.638491] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] raise exception.PortBindingFailed(port_id=port['id']) [ 636.638491] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] nova.exception.PortBindingFailed: Binding failed for 
port fedaf39a-b596-4e1d-944b-71449da0184a, please check neutron logs for more information. [ 636.638491] env[61629]: ERROR nova.compute.manager [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] [ 636.638491] env[61629]: DEBUG nova.compute.utils [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Binding failed for port fedaf39a-b596-4e1d-944b-71449da0184a, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 636.639441] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.258s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.640828] env[61629]: INFO nova.compute.claims [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 636.647452] env[61629]: DEBUG nova.compute.manager [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Build of instance b5625b76-37e3-49be-bd3b-8b864021dbd1 was re-scheduled: Binding failed for port fedaf39a-b596-4e1d-944b-71449da0184a, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 636.647907] env[61629]: DEBUG nova.compute.manager [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 636.648138] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Acquiring lock "refresh_cache-b5625b76-37e3-49be-bd3b-8b864021dbd1" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 636.648287] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Acquired lock "refresh_cache-b5625b76-37e3-49be-bd3b-8b864021dbd1" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.648956] env[61629]: DEBUG nova.network.neutron [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 636.718826] env[61629]: DEBUG oslo_concurrency.lockutils [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Acquiring lock "f5830e36-257a-418a-add6-01195bb7d103" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.723287] env[61629]: DEBUG oslo_concurrency.lockutils [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Lock "f5830e36-257a-418a-add6-01195bb7d103" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.791978] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 636.828614] env[61629]: INFO nova.compute.manager [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] [instance: b0343f07-0539-4395-81c8-46ca1f2a8920] Took 1.04 seconds to deallocate network for instance. 
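Every one of the failed builds above ends at the same check: _ensure_no_port_binding_failure(port) in nova/network/neutron.py (line 294 in these tracebacks) raising PortBindingFailed for the port Neutron could not bind, after which the build is re-scheduled and its network deallocated, as the surrounding DEBUG/INFO entries show. A hedged sketch of that check follows; the attribute it consults ('binding:vif_type' == 'binding_failed') is an assumption based on the usual Neutron port-binding convention and is not itself visible in this log:

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed (illustration only)."""

        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")


    def ensure_no_port_binding_failure(port: dict) -> None:
        # Neutron marks a port whose binding failed with a sentinel vif_type;
        # the exact attribute and value here are assumed, not taken from the log.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    # Example: a port dict shaped like a Neutron API response for a failed binding,
    # using the port ID from the b5625b76 traceback above.
    port = {'id': 'fedaf39a-b596-4e1d-944b-71449da0184a',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)

When this check fires during _update_ports_for_instance, the compute manager aborts the resource claim and re-schedules the build, which matches the "was re-scheduled: Binding failed for port ..." entries that follow each traceback in this log.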
[ 637.019388] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Acquiring lock "52816a66-442f-4869-aee3-0cebd6f5e9bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.019611] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Lock "52816a66-442f-4869-aee3-0cebd6f5e9bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.171246] env[61629]: DEBUG nova.network.neutron [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 637.305764] env[61629]: DEBUG nova.network.neutron [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.315644] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.808482] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Releasing lock "refresh_cache-b5625b76-37e3-49be-bd3b-8b864021dbd1" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.808744] env[61629]: DEBUG nova.compute.manager [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 637.808895] env[61629]: DEBUG nova.compute.manager [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 637.809072] env[61629]: DEBUG nova.network.neutron [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 637.833335] env[61629]: DEBUG nova.network.neutron [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 637.884803] env[61629]: INFO nova.scheduler.client.report [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 tempest-ServerAddressesTestJSON-2114920335-project-member] Deleted allocations for instance b0343f07-0539-4395-81c8-46ca1f2a8920 [ 638.216380] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8372dbe1-1bc2-4a75-9292-e7cb466642b9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.225706] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55708460-e152-4ead-a07f-8be82222e842 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.266970] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f4eba16-91a3-4a59-ba86-be442e15efe9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.276214] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47fe9520-7581-41cc-830e-666331dd187b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.291205] env[61629]: DEBUG nova.compute.provider_tree [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 638.344039] env[61629]: DEBUG nova.network.neutron [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.398156] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f330c37-cbd6-4c3d-a752-8f332899426d tempest-ServerAddressesTestJSON-2114920335 
tempest-ServerAddressesTestJSON-2114920335-project-member] Lock "b0343f07-0539-4395-81c8-46ca1f2a8920" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.269s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.795395] env[61629]: DEBUG nova.scheduler.client.report [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 638.847972] env[61629]: INFO nova.compute.manager [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] [instance: b5625b76-37e3-49be-bd3b-8b864021dbd1] Took 1.04 seconds to deallocate network for instance. [ 638.900339] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 639.302211] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.662s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.303209] env[61629]: DEBUG nova.compute.manager [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 639.305195] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.913s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.307118] env[61629]: INFO nova.compute.claims [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 639.423155] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.817030] env[61629]: DEBUG nova.compute.utils [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 639.818208] env[61629]: DEBUG nova.compute.manager [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 639.818512] env[61629]: DEBUG nova.network.neutron [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 639.860043] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Acquiring lock "fa8a181b-2170-4c38-98d6-adc4e5a80f94" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.860516] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Lock "fa8a181b-2170-4c38-98d6-adc4e5a80f94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.881348] env[61629]: DEBUG nova.policy [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b26212b71b0644f0b01bcf41a7c23f0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '89bd7311fc964320aa425590dc03b210', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 639.883689] env[61629]: INFO nova.scheduler.client.report [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Deleted allocations for instance b5625b76-37e3-49be-bd3b-8b864021dbd1 [ 640.324217] env[61629]: DEBUG nova.compute.manager [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 640.394023] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8c064a87-32d5-40b8-9777-cea38234be2a tempest-FloatingIPsAssociationTestJSON-1471806569 tempest-FloatingIPsAssociationTestJSON-1471806569-project-member] Lock "b5625b76-37e3-49be-bd3b-8b864021dbd1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.223s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.549952] env[61629]: DEBUG nova.network.neutron [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Successfully created port: 683a0eb5-ccf5-4e74-9beb-82cbf25490b5 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 640.781373] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d6c9e21-1230-4648-ac0d-5274fed32860 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.790548] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d7b3c3a-5ec1-430a-819b-5c7d60aa0fe4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.826996] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d8342f-f5ee-45c7-84b3-a2860ebce60e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.840412] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52d0e40-2645-4f98-8e61-207a8ec54224 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.855376] env[61629]: DEBUG nova.compute.provider_tree [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 640.894965] env[61629]: DEBUG nova.compute.manager [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 641.337904] env[61629]: DEBUG nova.compute.manager [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 641.358782] env[61629]: DEBUG nova.scheduler.client.report [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 641.388757] env[61629]: DEBUG nova.virt.hardware [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 641.389008] env[61629]: DEBUG nova.virt.hardware [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 641.389203] env[61629]: DEBUG nova.virt.hardware [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 641.389445] env[61629]: DEBUG nova.virt.hardware [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 641.389616] env[61629]: DEBUG nova.virt.hardware [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 641.389768] env[61629]: DEBUG nova.virt.hardware [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 641.389978] env[61629]: DEBUG nova.virt.hardware [None 
req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 641.390246] env[61629]: DEBUG nova.virt.hardware [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 641.390429] env[61629]: DEBUG nova.virt.hardware [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 641.390598] env[61629]: DEBUG nova.virt.hardware [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 641.391138] env[61629]: DEBUG nova.virt.hardware [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 641.391733] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330ea2d0-3b00-4755-9f77-38afda0ace09 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.404208] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17688fcf-633e-4efe-808b-cad2769f3b28 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.425604] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.863985] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.559s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 641.864569] env[61629]: DEBUG nova.compute.manager [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 641.867780] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.417s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.869237] env[61629]: INFO nova.compute.claims [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 642.372094] env[61629]: DEBUG nova.compute.utils [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 642.372094] env[61629]: DEBUG nova.compute.manager [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 642.372094] env[61629]: DEBUG nova.network.neutron [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 642.483418] env[61629]: DEBUG nova.policy [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b26212b71b0644f0b01bcf41a7c23f0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '89bd7311fc964320aa425590dc03b210', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 642.654125] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 642.654297] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 642.872739] env[61629]: DEBUG nova.compute.manager [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 642.974128] env[61629]: DEBUG nova.compute.manager [req-b542762b-4d66-4991-a889-1e7a52b7785d req-2f1a3fe2-37d1-4935-a189-70a40cd89598 service nova] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Received event network-changed-683a0eb5-ccf5-4e74-9beb-82cbf25490b5 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 642.975029] env[61629]: DEBUG nova.compute.manager [req-b542762b-4d66-4991-a889-1e7a52b7785d req-2f1a3fe2-37d1-4935-a189-70a40cd89598 service nova] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Refreshing instance network info cache due to event network-changed-683a0eb5-ccf5-4e74-9beb-82cbf25490b5. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 642.975029] env[61629]: DEBUG oslo_concurrency.lockutils [req-b542762b-4d66-4991-a889-1e7a52b7785d req-2f1a3fe2-37d1-4935-a189-70a40cd89598 service nova] Acquiring lock "refresh_cache-d43d47a2-a27b-4bb8-9421-61805064a3d2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.975029] env[61629]: DEBUG oslo_concurrency.lockutils [req-b542762b-4d66-4991-a889-1e7a52b7785d req-2f1a3fe2-37d1-4935-a189-70a40cd89598 service nova] Acquired lock "refresh_cache-d43d47a2-a27b-4bb8-9421-61805064a3d2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.975029] env[61629]: DEBUG nova.network.neutron [req-b542762b-4d66-4991-a889-1e7a52b7785d req-2f1a3fe2-37d1-4935-a189-70a40cd89598 service nova] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Refreshing network info cache for port 683a0eb5-ccf5-4e74-9beb-82cbf25490b5 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 643.159670] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 643.159770] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Starting heal instance info cache {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 643.159810] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Rebuilding the list of instances to heal {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 643.372739] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d3ba6b-9a44-473a-a627-f1ac0f6a08b9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.385242] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e66eda48-5769-4a01-9e5e-616dd33c205b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.425573] env[61629]: DEBUG nova.network.neutron [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Successfully created port: 5881b127-84c0-467c-99b3-a3b33be5a839 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 643.428475] 
env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd9a784-e980-45ef-a44b-263fa988ecd1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.440826] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d1aedf-2e8b-450f-ab57-33be55c0289f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.455701] env[61629]: DEBUG nova.compute.provider_tree [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 643.459330] env[61629]: ERROR nova.compute.manager [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 683a0eb5-ccf5-4e74-9beb-82cbf25490b5, please check neutron logs for more information. [ 643.459330] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 643.459330] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 643.459330] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 643.459330] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 643.459330] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 643.459330] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 643.459330] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 643.459330] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 643.459330] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 643.459330] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 643.459330] env[61629]: ERROR nova.compute.manager raise self.value [ 643.459330] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 643.459330] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 643.459330] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 643.459330] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 643.459782] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 643.459782] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 643.459782] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 683a0eb5-ccf5-4e74-9beb-82cbf25490b5, please check neutron logs for more information. 
[ 643.459782] env[61629]: ERROR nova.compute.manager [ 643.459782] env[61629]: Traceback (most recent call last): [ 643.459782] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 643.459782] env[61629]: listener.cb(fileno) [ 643.459782] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 643.459782] env[61629]: result = function(*args, **kwargs) [ 643.459782] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 643.459782] env[61629]: return func(*args, **kwargs) [ 643.459782] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 643.459782] env[61629]: raise e [ 643.459782] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 643.459782] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 643.459782] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 643.459782] env[61629]: created_port_ids = self._update_ports_for_instance( [ 643.459782] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 643.459782] env[61629]: with excutils.save_and_reraise_exception(): [ 643.459782] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 643.459782] env[61629]: self.force_reraise() [ 643.459782] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 643.459782] env[61629]: raise self.value [ 643.459782] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 643.459782] env[61629]: updated_port = self._update_port( [ 643.459782] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 643.459782] env[61629]: _ensure_no_port_binding_failure(port) [ 643.459782] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 643.459782] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 643.460449] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 683a0eb5-ccf5-4e74-9beb-82cbf25490b5, please check neutron logs for more information. [ 643.460449] env[61629]: Removing descriptor: 15 [ 643.460449] env[61629]: ERROR nova.compute.manager [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 683a0eb5-ccf5-4e74-9beb-82cbf25490b5, please check neutron logs for more information. 
[ 643.460449] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Traceback (most recent call last): [ 643.460449] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 643.460449] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] yield resources [ 643.460449] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 643.460449] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] self.driver.spawn(context, instance, image_meta, [ 643.460449] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 643.460449] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 643.460449] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 643.460449] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] vm_ref = self.build_virtual_machine(instance, [ 643.460735] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 643.460735] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] vif_infos = vmwarevif.get_vif_info(self._session, [ 643.460735] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 643.460735] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] for vif in network_info: [ 643.460735] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 643.460735] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] return self._sync_wrapper(fn, *args, **kwargs) [ 643.460735] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 643.460735] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] self.wait() [ 643.460735] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 643.460735] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] self[:] = self._gt.wait() [ 643.460735] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 643.460735] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] return self._exit_event.wait() [ 643.460735] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 643.461282] env[61629]: ERROR 
nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] result = hub.switch() [ 643.461282] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 643.461282] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] return self.greenlet.switch() [ 643.461282] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 643.461282] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] result = function(*args, **kwargs) [ 643.461282] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 643.461282] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] return func(*args, **kwargs) [ 643.461282] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 643.461282] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] raise e [ 643.461282] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 643.461282] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] nwinfo = self.network_api.allocate_for_instance( [ 643.461282] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 643.461282] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] created_port_ids = self._update_ports_for_instance( [ 643.461638] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 643.461638] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] with excutils.save_and_reraise_exception(): [ 643.461638] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 643.461638] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] self.force_reraise() [ 643.461638] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 643.461638] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] raise self.value [ 643.461638] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 643.461638] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] updated_port = self._update_port( [ 643.461638] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 643.461638] 
env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] _ensure_no_port_binding_failure(port) [ 643.461638] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 643.461638] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] raise exception.PortBindingFailed(port_id=port['id']) [ 643.461935] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] nova.exception.PortBindingFailed: Binding failed for port 683a0eb5-ccf5-4e74-9beb-82cbf25490b5, please check neutron logs for more information. [ 643.461935] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] [ 643.461935] env[61629]: INFO nova.compute.manager [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Terminating instance [ 643.462658] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Acquiring lock "refresh_cache-d43d47a2-a27b-4bb8-9421-61805064a3d2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 643.665667] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Skipping network cache update for instance because it is Building. {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 643.665808] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Skipping network cache update for instance because it is Building. {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 643.665966] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Skipping network cache update for instance because it is Building. {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 643.666070] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Didn't find any instances for network info cache update. 
{{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 643.666254] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 643.666417] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 643.666601] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 643.666759] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 643.666962] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 643.667048] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 643.667193] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61629) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 643.667340] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager.update_available_resource {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 643.696058] env[61629]: DEBUG nova.network.neutron [req-b542762b-4d66-4991-a889-1e7a52b7785d req-2f1a3fe2-37d1-4935-a189-70a40cd89598 service nova] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 643.886198] env[61629]: DEBUG nova.compute.manager [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 643.918336] env[61629]: DEBUG nova.virt.hardware [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 643.918566] env[61629]: DEBUG nova.virt.hardware [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 643.918740] env[61629]: DEBUG nova.virt.hardware [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 643.920254] env[61629]: DEBUG nova.virt.hardware [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 643.920254] env[61629]: DEBUG nova.virt.hardware [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 643.920254] env[61629]: DEBUG nova.virt.hardware [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 643.920254] env[61629]: DEBUG nova.virt.hardware [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 643.920254] env[61629]: DEBUG nova.virt.hardware [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 643.920738] env[61629]: DEBUG nova.virt.hardware [None 
req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 643.920738] env[61629]: DEBUG nova.virt.hardware [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 643.920738] env[61629]: DEBUG nova.virt.hardware [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 643.920901] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b4cc54-a573-4593-833b-1b7ba56f059b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.933059] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ae05e2-8e0e-485d-af6f-0f08a35b104f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.961033] env[61629]: DEBUG nova.scheduler.client.report [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 643.987174] env[61629]: DEBUG nova.network.neutron [req-b542762b-4d66-4991-a889-1e7a52b7785d req-2f1a3fe2-37d1-4935-a189-70a40cd89598 service nova] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.171260] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.464299] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.465445] env[61629]: DEBUG nova.compute.manager [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 
dd406dd1-0e19-400b-a862-ae51fd134017] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 644.469019] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.429s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.470412] env[61629]: INFO nova.compute.claims [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 644.492895] env[61629]: DEBUG oslo_concurrency.lockutils [req-b542762b-4d66-4991-a889-1e7a52b7785d req-2f1a3fe2-37d1-4935-a189-70a40cd89598 service nova] Releasing lock "refresh_cache-d43d47a2-a27b-4bb8-9421-61805064a3d2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.493403] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Acquired lock "refresh_cache-d43d47a2-a27b-4bb8-9421-61805064a3d2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.493591] env[61629]: DEBUG nova.network.neutron [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 644.733246] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Acquiring lock "853f3cd8-c874-45e8-9e89-ee897dea87a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.734032] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Lock "853f3cd8-c874-45e8-9e89-ee897dea87a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.986421] env[61629]: DEBUG nova.compute.utils [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 644.987618] env[61629]: DEBUG nova.compute.manager [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Not allocating networking since 'none' was specified. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 645.041346] env[61629]: DEBUG nova.network.neutron [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 645.095336] env[61629]: DEBUG nova.compute.manager [req-54927773-fbda-42d8-ace2-60a491ea070a req-7eeee262-fe22-4a39-89c7-a2e37c2f5d2b service nova] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Received event network-vif-deleted-683a0eb5-ccf5-4e74-9beb-82cbf25490b5 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 645.299676] env[61629]: DEBUG nova.network.neutron [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.456820] env[61629]: DEBUG nova.compute.manager [req-6c379cc8-e58b-48d5-ac34-7eabe3c6f9ca req-f3650e62-9764-4abe-8cdd-95e01aae9e09 service nova] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Received event network-changed-5881b127-84c0-467c-99b3-a3b33be5a839 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 645.456938] env[61629]: DEBUG nova.compute.manager [req-6c379cc8-e58b-48d5-ac34-7eabe3c6f9ca req-f3650e62-9764-4abe-8cdd-95e01aae9e09 service nova] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Refreshing instance network info cache due to event network-changed-5881b127-84c0-467c-99b3-a3b33be5a839. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 645.457171] env[61629]: DEBUG oslo_concurrency.lockutils [req-6c379cc8-e58b-48d5-ac34-7eabe3c6f9ca req-f3650e62-9764-4abe-8cdd-95e01aae9e09 service nova] Acquiring lock "refresh_cache-4839c06e-f55a-4162-8eae-cfaeae07cdae" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.457307] env[61629]: DEBUG oslo_concurrency.lockutils [req-6c379cc8-e58b-48d5-ac34-7eabe3c6f9ca req-f3650e62-9764-4abe-8cdd-95e01aae9e09 service nova] Acquired lock "refresh_cache-4839c06e-f55a-4162-8eae-cfaeae07cdae" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.457460] env[61629]: DEBUG nova.network.neutron [req-6c379cc8-e58b-48d5-ac34-7eabe3c6f9ca req-f3650e62-9764-4abe-8cdd-95e01aae9e09 service nova] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Refreshing network info cache for port 5881b127-84c0-467c-99b3-a3b33be5a839 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 645.489791] env[61629]: DEBUG nova.compute.manager [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 645.802774] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Releasing lock "refresh_cache-d43d47a2-a27b-4bb8-9421-61805064a3d2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 645.803260] env[61629]: DEBUG nova.compute.manager [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 645.803461] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 645.803764] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-509583b5-47d6-47e9-a46c-f4850794bc4d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.824683] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08591541-404e-4ad8-b0d7-c9a493147bc5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.855836] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d43d47a2-a27b-4bb8-9421-61805064a3d2 could not be found. [ 645.856280] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 645.856515] env[61629]: INFO nova.compute.manager [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Took 0.05 seconds to destroy the instance on the hypervisor. [ 645.856781] env[61629]: DEBUG oslo.service.loopingcall [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 645.857030] env[61629]: DEBUG nova.compute.manager [-] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 645.857121] env[61629]: DEBUG nova.network.neutron [-] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 645.890677] env[61629]: DEBUG nova.network.neutron [-] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 645.987951] env[61629]: DEBUG nova.network.neutron [req-6c379cc8-e58b-48d5-ac34-7eabe3c6f9ca req-f3650e62-9764-4abe-8cdd-95e01aae9e09 service nova] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 646.022235] env[61629]: ERROR nova.compute.manager [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5881b127-84c0-467c-99b3-a3b33be5a839, please check neutron logs for more information. [ 646.022235] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 646.022235] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 646.022235] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 646.022235] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 646.022235] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 646.022235] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 646.022235] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 646.022235] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 646.022235] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 646.022235] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 646.022235] env[61629]: ERROR nova.compute.manager raise self.value [ 646.022235] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 646.022235] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 646.022235] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 646.022235] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 646.022661] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 646.022661] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 646.022661] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding 
failed for port 5881b127-84c0-467c-99b3-a3b33be5a839, please check neutron logs for more information. [ 646.022661] env[61629]: ERROR nova.compute.manager [ 646.022661] env[61629]: Traceback (most recent call last): [ 646.022661] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 646.022661] env[61629]: listener.cb(fileno) [ 646.022661] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 646.022661] env[61629]: result = function(*args, **kwargs) [ 646.022661] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 646.022661] env[61629]: return func(*args, **kwargs) [ 646.022661] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 646.022661] env[61629]: raise e [ 646.022661] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 646.022661] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 646.022661] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 646.022661] env[61629]: created_port_ids = self._update_ports_for_instance( [ 646.022661] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 646.022661] env[61629]: with excutils.save_and_reraise_exception(): [ 646.022661] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 646.022661] env[61629]: self.force_reraise() [ 646.022661] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 646.022661] env[61629]: raise self.value [ 646.022661] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 646.022661] env[61629]: updated_port = self._update_port( [ 646.022661] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 646.022661] env[61629]: _ensure_no_port_binding_failure(port) [ 646.022661] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 646.022661] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 646.023701] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 5881b127-84c0-467c-99b3-a3b33be5a839, please check neutron logs for more information. [ 646.023701] env[61629]: Removing descriptor: 21 [ 646.023701] env[61629]: ERROR nova.compute.manager [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5881b127-84c0-467c-99b3-a3b33be5a839, please check neutron logs for more information. 
[ 646.023701] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Traceback (most recent call last): [ 646.023701] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 646.023701] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] yield resources [ 646.023701] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 646.023701] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] self.driver.spawn(context, instance, image_meta, [ 646.023701] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 646.023701] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 646.023701] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 646.023701] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] vm_ref = self.build_virtual_machine(instance, [ 646.024054] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 646.024054] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] vif_infos = vmwarevif.get_vif_info(self._session, [ 646.024054] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 646.024054] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] for vif in network_info: [ 646.024054] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 646.024054] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] return self._sync_wrapper(fn, *args, **kwargs) [ 646.024054] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 646.024054] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] self.wait() [ 646.024054] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 646.024054] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] self[:] = self._gt.wait() [ 646.024054] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 646.024054] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] return self._exit_event.wait() [ 646.024054] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 646.024368] env[61629]: ERROR 
nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] result = hub.switch() [ 646.024368] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 646.024368] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] return self.greenlet.switch() [ 646.024368] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 646.024368] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] result = function(*args, **kwargs) [ 646.024368] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 646.024368] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] return func(*args, **kwargs) [ 646.024368] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 646.024368] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] raise e [ 646.024368] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 646.024368] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] nwinfo = self.network_api.allocate_for_instance( [ 646.024368] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 646.024368] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] created_port_ids = self._update_ports_for_instance( [ 646.024787] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 646.024787] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] with excutils.save_and_reraise_exception(): [ 646.024787] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 646.024787] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] self.force_reraise() [ 646.024787] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 646.024787] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] raise self.value [ 646.024787] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 646.024787] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] updated_port = self._update_port( [ 646.024787] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 646.024787] 
env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] _ensure_no_port_binding_failure(port) [ 646.024787] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 646.024787] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] raise exception.PortBindingFailed(port_id=port['id']) [ 646.025093] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] nova.exception.PortBindingFailed: Binding failed for port 5881b127-84c0-467c-99b3-a3b33be5a839, please check neutron logs for more information. [ 646.025093] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] [ 646.025093] env[61629]: INFO nova.compute.manager [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Terminating instance [ 646.028632] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Acquiring lock "refresh_cache-4839c06e-f55a-4162-8eae-cfaeae07cdae" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 646.069018] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d29ade-41ba-4960-a1ec-0cbd04d66eaf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.077515] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23570e0a-01f5-4bd6-b8c9-5bda02a85f2c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.124533] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a9e3d2-91f8-4d52-9d2f-130a9d91e5b7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.130741] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5833097f-bc4d-49d2-85fa-307371c22880 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.144627] env[61629]: DEBUG nova.compute.provider_tree [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 646.392095] env[61629]: DEBUG nova.network.neutron [-] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.404708] env[61629]: DEBUG nova.network.neutron [req-6c379cc8-e58b-48d5-ac34-7eabe3c6f9ca req-f3650e62-9764-4abe-8cdd-95e01aae9e09 service nova] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.506849] env[61629]: DEBUG 
nova.compute.manager [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 646.543316] env[61629]: DEBUG nova.virt.hardware [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 646.543316] env[61629]: DEBUG nova.virt.hardware [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 646.543473] env[61629]: DEBUG nova.virt.hardware [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 646.543636] env[61629]: DEBUG nova.virt.hardware [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 646.543918] env[61629]: DEBUG nova.virt.hardware [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 646.544019] env[61629]: DEBUG nova.virt.hardware [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 646.544593] env[61629]: DEBUG nova.virt.hardware [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 646.544858] env[61629]: DEBUG nova.virt.hardware [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 646.544955] env[61629]: DEBUG nova.virt.hardware [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 646.545141] env[61629]: DEBUG nova.virt.hardware [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 646.545361] env[61629]: DEBUG nova.virt.hardware [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 646.546244] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48690a79-2407-48a3-8e13-b67761d7444d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.557658] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0dcbfbf-88f8-41d3-9440-64388bb87986 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.577650] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Instance VIF info [] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 646.583937] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Creating folder: Project (86b44c67da684453a53bce028ba7738a). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 646.584301] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6f8af29-b19c-424e-b75f-c2bc79c38321 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.596839] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Created folder: Project (86b44c67da684453a53bce028ba7738a) in parent group-v288443. [ 646.597039] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Creating folder: Instances. Parent ref: group-v288460. 
{{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 646.597282] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dacf66b4-c496-46a1-8ebb-52740f6154c9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.606237] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Created folder: Instances in parent group-v288460. [ 646.606530] env[61629]: DEBUG oslo.service.loopingcall [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 646.606736] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 646.606946] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-58b564a1-a371-4b51-ab89-b22170f4758c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.625492] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 646.625492] env[61629]: value = "task-1354019" [ 646.625492] env[61629]: _type = "Task" [ 646.625492] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.633296] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354019, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.649066] env[61629]: DEBUG nova.scheduler.client.report [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 646.897871] env[61629]: INFO nova.compute.manager [-] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Took 1.04 seconds to deallocate network for instance. 
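Editor's note (not part of the original log): the "Inventory has not changed for provider d075eff1-..." record above carries the resource provider's reported capacity. Placement generally derives usable capacity per resource class as (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size constraining each individual allocation. The following is a minimal illustrative sketch, not Nova or Placement code, using only the values that appear in that log record and assuming that formula:

    # Sketch: derive usable capacity from the inventory record logged above,
    # assuming capacity = (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0,
                      'min_unit': 1, 'max_unit': 16,    'step_size': 1},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                      'min_unit': 1, 'max_unit': 65530, 'step_size': 1},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0,
                      'min_unit': 1, 'max_unit': 151,   'step_size': 1},
    }

    def usable_capacity(inv: dict) -> float:
        # Usable capacity under the assumed Placement formula.
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    for rc, inv in inventory.items():
        print(f"{rc}: capacity={usable_capacity(inv)}, per-allocation max={inv['max_unit']}")
    # Expected output for the values above:
    # VCPU: capacity=192.0, per-allocation max=16
    # MEMORY_MB: capacity=196078.0, per-allocation max=65530
    # DISK_GB: capacity=400.0, per-allocation max=151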
[ 646.900688] env[61629]: DEBUG nova.compute.claims [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 646.901075] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.907690] env[61629]: DEBUG oslo_concurrency.lockutils [req-6c379cc8-e58b-48d5-ac34-7eabe3c6f9ca req-f3650e62-9764-4abe-8cdd-95e01aae9e09 service nova] Releasing lock "refresh_cache-4839c06e-f55a-4162-8eae-cfaeae07cdae" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 646.908239] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Acquired lock "refresh_cache-4839c06e-f55a-4162-8eae-cfaeae07cdae" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.908448] env[61629]: DEBUG nova.network.neutron [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 647.140427] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354019, 'name': CreateVM_Task, 'duration_secs': 0.274642} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.140807] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 647.142348] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.142348] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.144114] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 647.144114] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c2b9420-5647-4226-8fe5-9b2b8d81bd20 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.148832] env[61629]: DEBUG oslo_vmware.api [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 647.148832] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52015d4a-d4c6-46fe-a7e7-3852bbc157c0" [ 647.148832] env[61629]: _type = "Task" [ 647.148832] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.154661] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.686s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.155315] env[61629]: DEBUG nova.compute.manager [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 647.164827] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.075s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.165180] env[61629]: INFO nova.compute.claims [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 647.168320] env[61629]: DEBUG oslo_vmware.api [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52015d4a-d4c6-46fe-a7e7-3852bbc157c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.431244] env[61629]: DEBUG nova.network.neutron [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 647.563734] env[61629]: DEBUG nova.network.neutron [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.661644] env[61629]: DEBUG oslo_vmware.api [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52015d4a-d4c6-46fe-a7e7-3852bbc157c0, 'name': SearchDatastore_Task, 'duration_secs': 0.010101} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.664213] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 647.664427] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 647.664624] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.664783] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.664966] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 647.665273] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe3b2676-2dd7-4b9b-b021-e1c4801ef568 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.672299] env[61629]: DEBUG nova.compute.utils [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 647.676111] env[61629]: DEBUG nova.compute.manager [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Not allocating networking since 'none' was specified. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 647.676111] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 647.676111] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 647.678670] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef790e1f-ab17-4cfa-b8f6-5e94f06bfcce {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.684735] env[61629]: DEBUG oslo_vmware.api [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 647.684735] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52e3425c-193b-97e2-8a35-65c51db4ce8e" [ 647.684735] env[61629]: _type = "Task" [ 647.684735] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.699490] env[61629]: DEBUG oslo_vmware.api [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52e3425c-193b-97e2-8a35-65c51db4ce8e, 'name': SearchDatastore_Task, 'duration_secs': 0.008162} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.700660] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d34f1e04-d4be-423d-aa35-ac96e80c861e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.712926] env[61629]: DEBUG nova.compute.manager [req-979f9b44-00f3-46b8-b406-7b81648892fe req-46515e8d-9cbc-4d27-aa51-6b2602f1e36e service nova] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Received event network-vif-deleted-5881b127-84c0-467c-99b3-a3b33be5a839 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 647.713643] env[61629]: DEBUG oslo_vmware.api [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 647.713643] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5288f2f0-7a7c-05df-17ea-d0afea026877" [ 647.713643] env[61629]: _type = "Task" [ 647.713643] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.723764] env[61629]: DEBUG oslo_vmware.api [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5288f2f0-7a7c-05df-17ea-d0afea026877, 'name': SearchDatastore_Task, 'duration_secs': 0.009335} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.724096] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 647.724358] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] dd406dd1-0e19-400b-a862-ae51fd134017/dd406dd1-0e19-400b-a862-ae51fd134017.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 647.724614] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-be7eb093-ddb4-45dd-8adf-f74faf50b001 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.735808] env[61629]: DEBUG oslo_vmware.api [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 647.735808] env[61629]: value = "task-1354020" [ 647.735808] env[61629]: _type = "Task" [ 647.735808] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.750115] env[61629]: DEBUG oslo_vmware.api [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354020, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.069616] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Releasing lock "refresh_cache-4839c06e-f55a-4162-8eae-cfaeae07cdae" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.070078] env[61629]: DEBUG nova.compute.manager [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 648.070271] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 648.070581] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-675269f6-398d-47dc-8c7b-89ba5c877232 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.084214] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b13d8336-bdcb-45d0-89c2-0f80deb4c2f5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.107762] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4839c06e-f55a-4162-8eae-cfaeae07cdae could not be found. [ 648.108110] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 648.108318] env[61629]: INFO nova.compute.manager [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Took 0.04 seconds to destroy the instance on the hypervisor. [ 648.108592] env[61629]: DEBUG oslo.service.loopingcall [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 648.108852] env[61629]: DEBUG nova.compute.manager [-] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 648.108943] env[61629]: DEBUG nova.network.neutron [-] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 648.150059] env[61629]: DEBUG nova.network.neutron [-] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 648.180199] env[61629]: DEBUG nova.compute.manager [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 648.254961] env[61629]: DEBUG oslo_vmware.api [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354020, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485747} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.255256] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] dd406dd1-0e19-400b-a862-ae51fd134017/dd406dd1-0e19-400b-a862-ae51fd134017.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 648.255463] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 648.255708] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9ddc56f1-cb12-4112-8110-b971252ed444 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.265495] env[61629]: DEBUG oslo_vmware.api [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 648.265495] env[61629]: value = "task-1354021" [ 648.265495] env[61629]: _type = "Task" [ 648.265495] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.275771] env[61629]: DEBUG oslo_vmware.api [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354021, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.663563] env[61629]: DEBUG nova.network.neutron [-] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.784223] env[61629]: DEBUG oslo_vmware.api [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354021, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064933} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.784223] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 648.784223] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c284db57-70a9-431d-a782-2bf20fd157d9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.816105] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Reconfiguring VM instance instance-00000019 to attach disk [datastore2] dd406dd1-0e19-400b-a862-ae51fd134017/dd406dd1-0e19-400b-a862-ae51fd134017.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 648.819323] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-886e081c-75f5-49b0-a953-bfe1ccb06698 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.846458] env[61629]: DEBUG oslo_vmware.api [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 648.846458] env[61629]: value = "task-1354022" [ 648.846458] env[61629]: _type = "Task" [ 648.846458] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.859435] env[61629]: DEBUG oslo_vmware.api [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354022, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.898569] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02d501f-b176-4704-b083-a469d0e823f0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.911695] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82481e4a-3e56-434f-a218-c6cd8d7ab7a6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.940201] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8115659-0964-4fde-a4d8-4e40611644b6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.949196] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52384b5a-313e-4994-9b7d-cfa77a42c0ea {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.963084] env[61629]: DEBUG nova.compute.provider_tree [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.169469] env[61629]: INFO nova.compute.manager [-] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Took 1.06 seconds to deallocate network for instance. [ 649.172241] env[61629]: DEBUG nova.compute.claims [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 649.172560] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.203143] env[61629]: DEBUG nova.compute.manager [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 649.239365] env[61629]: DEBUG nova.virt.hardware [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 649.239630] env[61629]: DEBUG nova.virt.hardware [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 649.239791] env[61629]: DEBUG nova.virt.hardware [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 649.239967] env[61629]: DEBUG nova.virt.hardware [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 649.240125] env[61629]: DEBUG nova.virt.hardware [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 649.240269] env[61629]: DEBUG nova.virt.hardware [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 649.240475] env[61629]: DEBUG nova.virt.hardware [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 649.240631] env[61629]: DEBUG nova.virt.hardware [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 649.240794] env[61629]: DEBUG nova.virt.hardware [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 
tempest-ServerShowV247Test-397641212-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 649.240970] env[61629]: DEBUG nova.virt.hardware [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 649.242217] env[61629]: DEBUG nova.virt.hardware [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 649.242217] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6aeb68-154a-4a7e-8f9a-a64cf01f8674 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.251988] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-876619dd-86cd-4ebf-ba6d-882c3a5bfdf5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.267250] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Instance VIF info [] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 649.273372] env[61629]: DEBUG oslo.service.loopingcall [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 649.273631] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 649.273839] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d2bbc134-c6b5-4d7c-8a61-b551add69bc1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.299972] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 649.299972] env[61629]: value = "task-1354023" [ 649.299972] env[61629]: _type = "Task" [ 649.299972] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.314083] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354023, 'name': CreateVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.357276] env[61629]: DEBUG oslo_vmware.api [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354022, 'name': ReconfigVM_Task, 'duration_secs': 0.308507} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.359863] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Reconfigured VM instance instance-00000019 to attach disk [datastore2] dd406dd1-0e19-400b-a862-ae51fd134017/dd406dd1-0e19-400b-a862-ae51fd134017.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 649.359863] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-112f717a-508b-4000-bf38-a99df14d1322 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.365503] env[61629]: DEBUG oslo_vmware.api [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 649.365503] env[61629]: value = "task-1354024" [ 649.365503] env[61629]: _type = "Task" [ 649.365503] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.375984] env[61629]: DEBUG oslo_vmware.api [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354024, 'name': Rename_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.467427] env[61629]: DEBUG nova.scheduler.client.report [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 649.813808] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354023, 'name': CreateVM_Task, 'duration_secs': 0.305077} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.814103] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 649.815455] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.815455] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.815455] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 649.815455] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84436b71-d10f-4387-b8f8-c3f28b5b70c5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.820264] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 649.820264] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52d80bc9-7cf5-4662-c85c-37fb656a2dc8" [ 649.820264] env[61629]: _type = "Task" [ 649.820264] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.829241] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52d80bc9-7cf5-4662-c85c-37fb656a2dc8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.876365] env[61629]: DEBUG oslo_vmware.api [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354024, 'name': Rename_Task, 'duration_secs': 0.141288} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.876919] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 649.877614] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-523359df-cb4d-4ec1-8906-75e886a2a494 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.883778] env[61629]: DEBUG oslo_vmware.api [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 649.883778] env[61629]: value = "task-1354028" [ 649.883778] env[61629]: _type = "Task" [ 649.883778] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.892554] env[61629]: DEBUG oslo_vmware.api [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354028, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.973910] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.811s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.974505] env[61629]: DEBUG nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 649.977299] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.665s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.978837] env[61629]: INFO nova.compute.claims [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 650.223955] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "3b7866fb-213a-46a7-b31c-4ce5598591c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.224286] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "3b7866fb-213a-46a7-b31c-4ce5598591c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.331730] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52d80bc9-7cf5-4662-c85c-37fb656a2dc8, 'name': SearchDatastore_Task, 'duration_secs': 0.009485} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.331730] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.331730] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 650.332039] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.332196] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.332407] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 650.332666] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cc9ab28a-10ac-4945-b5d3-de6fcc5b131e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.343899] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 650.344119] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 650.345036] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1844f574-2811-4efe-bd42-b2b94a89b714 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.350390] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 650.350390] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]527fe80e-328e-54fc-fc15-f2fd334bc514" [ 650.350390] env[61629]: _type = "Task" [ 650.350390] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.367600] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]527fe80e-328e-54fc-fc15-f2fd334bc514, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.394988] env[61629]: DEBUG oslo_vmware.api [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354028, 'name': PowerOnVM_Task, 'duration_secs': 0.507814} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.395623] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 650.395623] env[61629]: INFO nova.compute.manager [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Took 3.89 seconds to spawn the instance on the hypervisor. [ 650.395623] env[61629]: DEBUG nova.compute.manager [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 650.396866] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9419f3c-6049-4713-b36b-f67850bb7965 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.483443] env[61629]: DEBUG nova.compute.utils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 650.489041] env[61629]: DEBUG nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 650.489041] env[61629]: DEBUG nova.network.neutron [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 650.557834] env[61629]: DEBUG nova.policy [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5fe4bfac55cf40e79f7d54206afde0d6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d3e9a414c944234a52993d63b42e53f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 650.846280] env[61629]: DEBUG oslo_concurrency.lockutils [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Acquiring lock "c3724b2e-4f6b-4db5-a68f-41e410e561e9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.846614] env[61629]: DEBUG oslo_concurrency.lockutils [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Lock "c3724b2e-4f6b-4db5-a68f-41e410e561e9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.862089] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]527fe80e-328e-54fc-fc15-f2fd334bc514, 'name': SearchDatastore_Task, 'duration_secs': 0.012856} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.863156] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0450ee65-bca9-4a13-8557-ca7ff11080e2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.870053] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 650.870053] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]529c2b85-7954-0a1b-0b48-81397606dd0d" [ 650.870053] env[61629]: _type = "Task" [ 650.870053] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.878034] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]529c2b85-7954-0a1b-0b48-81397606dd0d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.913161] env[61629]: INFO nova.compute.manager [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Took 24.48 seconds to build instance. [ 650.988610] env[61629]: DEBUG nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 650.997839] env[61629]: DEBUG nova.network.neutron [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Successfully created port: 1b89f1d7-2c1f-40fa-a812-8ab8ced966d1 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 651.387849] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]529c2b85-7954-0a1b-0b48-81397606dd0d, 'name': SearchDatastore_Task, 'duration_secs': 0.027369} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.390416] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.390686] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 2315bd37-6151-42d7-8b54-9ee367be0ed1/2315bd37-6151-42d7-8b54-9ee367be0ed1.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 651.391159] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dd0a74da-a23a-45df-b5c4-8eab7b8d6088 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.399597] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 651.399597] env[61629]: value = "task-1354029" [ 651.399597] env[61629]: _type = "Task" [ 651.399597] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.418252] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cd7c208e-af06-46f1-9562-257c529b6aa5 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lock "dd406dd1-0e19-400b-a862-ae51fd134017" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.915s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 651.426099] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354029, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.550784] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0051b808-3924-4645-a494-0458ba6ee729 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.558698] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-859a5531-a87d-4a8c-a1e2-82234b46e103 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.592677] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7dd3a0e-d847-4775-a0d5-90c2965bd443 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.600473] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb73c60b-408a-46a9-ae9b-69f4317314f4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.614681] env[61629]: DEBUG nova.compute.provider_tree [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.911019] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354029, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.926457] env[61629]: DEBUG nova.compute.manager [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 652.002922] env[61629]: DEBUG nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 652.033495] env[61629]: DEBUG nova.virt.hardware [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 652.033794] env[61629]: DEBUG nova.virt.hardware [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 652.033920] env[61629]: DEBUG nova.virt.hardware [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 652.034436] env[61629]: DEBUG nova.virt.hardware [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 652.034663] env[61629]: DEBUG nova.virt.hardware [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 652.034821] env[61629]: DEBUG nova.virt.hardware [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 652.035082] env[61629]: DEBUG nova.virt.hardware [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 652.035403] env[61629]: DEBUG nova.virt.hardware [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 652.035403] env[61629]: DEBUG nova.virt.hardware [None 
req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 652.035616] env[61629]: DEBUG nova.virt.hardware [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 652.035826] env[61629]: DEBUG nova.virt.hardware [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 652.036779] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ae20b57-4bab-4dc4-82d0-cfb7369ecc4e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.045519] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cddef15e-cdde-490e-9f67-21c4c47591c0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.118555] env[61629]: DEBUG nova.scheduler.client.report [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 652.344149] env[61629]: DEBUG nova.compute.manager [req-84371479-a8c6-47be-91f1-7814d078092a req-23f6b36c-f1b5-4a46-9dad-099b52b29558 service nova] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Received event network-changed-1b89f1d7-2c1f-40fa-a812-8ab8ced966d1 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 652.346655] env[61629]: DEBUG nova.compute.manager [req-84371479-a8c6-47be-91f1-7814d078092a req-23f6b36c-f1b5-4a46-9dad-099b52b29558 service nova] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Refreshing instance network info cache due to event network-changed-1b89f1d7-2c1f-40fa-a812-8ab8ced966d1. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 652.346655] env[61629]: DEBUG oslo_concurrency.lockutils [req-84371479-a8c6-47be-91f1-7814d078092a req-23f6b36c-f1b5-4a46-9dad-099b52b29558 service nova] Acquiring lock "refresh_cache-be2db738-cfe9-4720-b348-c7b03f28e96b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 652.346655] env[61629]: DEBUG oslo_concurrency.lockutils [req-84371479-a8c6-47be-91f1-7814d078092a req-23f6b36c-f1b5-4a46-9dad-099b52b29558 service nova] Acquired lock "refresh_cache-be2db738-cfe9-4720-b348-c7b03f28e96b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.346655] env[61629]: DEBUG nova.network.neutron [req-84371479-a8c6-47be-91f1-7814d078092a req-23f6b36c-f1b5-4a46-9dad-099b52b29558 service nova] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Refreshing network info cache for port 1b89f1d7-2c1f-40fa-a812-8ab8ced966d1 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 652.413109] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354029, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.80379} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.413109] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 2315bd37-6151-42d7-8b54-9ee367be0ed1/2315bd37-6151-42d7-8b54-9ee367be0ed1.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 652.413109] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 652.414055] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-48ba0a71-c551-4911-af09-78bbab785a8a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.421241] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 652.421241] env[61629]: value = "task-1354030" [ 652.421241] env[61629]: _type = "Task" [ 652.421241] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.434984] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354030, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.458477] env[61629]: DEBUG oslo_concurrency.lockutils [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.484884] env[61629]: ERROR nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1b89f1d7-2c1f-40fa-a812-8ab8ced966d1, please check neutron logs for more information. [ 652.484884] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 652.484884] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 652.484884] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 652.484884] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 652.484884] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 652.484884] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 652.484884] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 652.484884] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 652.484884] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 652.484884] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 652.484884] env[61629]: ERROR nova.compute.manager raise self.value [ 652.484884] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 652.484884] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 652.484884] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 652.484884] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 652.485652] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 652.485652] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 652.485652] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1b89f1d7-2c1f-40fa-a812-8ab8ced966d1, please check neutron logs for more information. 
[ 652.485652] env[61629]: ERROR nova.compute.manager [ 652.485652] env[61629]: Traceback (most recent call last): [ 652.485652] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 652.485652] env[61629]: listener.cb(fileno) [ 652.485652] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 652.485652] env[61629]: result = function(*args, **kwargs) [ 652.485652] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 652.485652] env[61629]: return func(*args, **kwargs) [ 652.485652] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 652.485652] env[61629]: raise e [ 652.485652] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 652.485652] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 652.485652] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 652.485652] env[61629]: created_port_ids = self._update_ports_for_instance( [ 652.485652] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 652.485652] env[61629]: with excutils.save_and_reraise_exception(): [ 652.485652] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 652.485652] env[61629]: self.force_reraise() [ 652.485652] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 652.485652] env[61629]: raise self.value [ 652.485652] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 652.485652] env[61629]: updated_port = self._update_port( [ 652.485652] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 652.485652] env[61629]: _ensure_no_port_binding_failure(port) [ 652.485652] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 652.485652] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 652.486801] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 1b89f1d7-2c1f-40fa-a812-8ab8ced966d1, please check neutron logs for more information. [ 652.486801] env[61629]: Removing descriptor: 15 [ 652.486801] env[61629]: ERROR nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1b89f1d7-2c1f-40fa-a812-8ab8ced966d1, please check neutron logs for more information. 
[ 652.486801] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Traceback (most recent call last): [ 652.486801] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 652.486801] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] yield resources [ 652.486801] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 652.486801] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] self.driver.spawn(context, instance, image_meta, [ 652.486801] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 652.486801] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 652.486801] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 652.486801] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] vm_ref = self.build_virtual_machine(instance, [ 652.487315] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 652.487315] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] vif_infos = vmwarevif.get_vif_info(self._session, [ 652.487315] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 652.487315] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] for vif in network_info: [ 652.487315] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 652.487315] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] return self._sync_wrapper(fn, *args, **kwargs) [ 652.487315] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 652.487315] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] self.wait() [ 652.487315] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 652.487315] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] self[:] = self._gt.wait() [ 652.487315] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 652.487315] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] return self._exit_event.wait() [ 652.487315] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 652.487853] env[61629]: ERROR 
nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] result = hub.switch() [ 652.487853] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 652.487853] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] return self.greenlet.switch() [ 652.487853] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 652.487853] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] result = function(*args, **kwargs) [ 652.487853] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 652.487853] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] return func(*args, **kwargs) [ 652.487853] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 652.487853] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] raise e [ 652.487853] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 652.487853] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] nwinfo = self.network_api.allocate_for_instance( [ 652.487853] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 652.487853] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] created_port_ids = self._update_ports_for_instance( [ 652.488789] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 652.488789] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] with excutils.save_and_reraise_exception(): [ 652.488789] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 652.488789] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] self.force_reraise() [ 652.488789] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 652.488789] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] raise self.value [ 652.488789] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 652.488789] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] updated_port = self._update_port( [ 652.488789] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 652.488789] 
env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] _ensure_no_port_binding_failure(port) [ 652.488789] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 652.488789] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] raise exception.PortBindingFailed(port_id=port['id']) [ 652.489338] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] nova.exception.PortBindingFailed: Binding failed for port 1b89f1d7-2c1f-40fa-a812-8ab8ced966d1, please check neutron logs for more information. [ 652.489338] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] [ 652.489338] env[61629]: INFO nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Terminating instance [ 652.490539] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquiring lock "refresh_cache-be2db738-cfe9-4720-b348-c7b03f28e96b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 652.628622] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.651s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.629291] env[61629]: DEBUG nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 652.635900] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.226s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.635900] env[61629]: INFO nova.compute.claims [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 652.871634] env[61629]: DEBUG nova.network.neutron [req-84371479-a8c6-47be-91f1-7814d078092a req-23f6b36c-f1b5-4a46-9dad-099b52b29558 service nova] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 652.931325] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354030, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064505} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.931647] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 652.932463] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034cc5f1-58d4-4cb5-b784-b5bda79be779 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.952520] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Reconfiguring VM instance instance-0000001a to attach disk [datastore2] 2315bd37-6151-42d7-8b54-9ee367be0ed1/2315bd37-6151-42d7-8b54-9ee367be0ed1.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 652.956038] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c781a67e-c5b3-45c9-a042-cbc120f79cea {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.977421] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 652.977421] env[61629]: value = "task-1354032" [ 652.977421] env[61629]: _type = "Task" [ 652.977421] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.988464] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354032, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.011870] env[61629]: DEBUG nova.network.neutron [req-84371479-a8c6-47be-91f1-7814d078092a req-23f6b36c-f1b5-4a46-9dad-099b52b29558 service nova] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.134076] env[61629]: DEBUG nova.compute.utils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 653.136112] env[61629]: DEBUG nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 653.136831] env[61629]: DEBUG nova.network.neutron [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 653.183203] env[61629]: DEBUG nova.policy [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5fe4bfac55cf40e79f7d54206afde0d6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d3e9a414c944234a52993d63b42e53f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 653.488780] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354032, 'name': ReconfigVM_Task, 'duration_secs': 0.322708} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.489263] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Reconfigured VM instance instance-0000001a to attach disk [datastore2] 2315bd37-6151-42d7-8b54-9ee367be0ed1/2315bd37-6151-42d7-8b54-9ee367be0ed1.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 653.490097] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-872aab73-f825-4eff-b3d8-cbca9a50ea74 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.497694] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 653.497694] env[61629]: value = "task-1354033" [ 653.497694] env[61629]: _type = "Task" [ 653.497694] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.509323] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354033, 'name': Rename_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.516683] env[61629]: DEBUG oslo_concurrency.lockutils [req-84371479-a8c6-47be-91f1-7814d078092a req-23f6b36c-f1b5-4a46-9dad-099b52b29558 service nova] Releasing lock "refresh_cache-be2db738-cfe9-4720-b348-c7b03f28e96b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.516683] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquired lock "refresh_cache-be2db738-cfe9-4720-b348-c7b03f28e96b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.516683] env[61629]: DEBUG nova.network.neutron [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 653.562530] env[61629]: DEBUG nova.network.neutron [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Successfully created port: ccaa17cb-d02f-4f92-bf54-7140be7e5cd8 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 653.638619] env[61629]: DEBUG nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 654.012150] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354033, 'name': Rename_Task, 'duration_secs': 0.174348} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.012645] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 654.015435] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d8fde63e-31e7-43ac-b2f1-9500b9cbd8cb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.025155] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 654.025155] env[61629]: value = "task-1354034" [ 654.025155] env[61629]: _type = "Task" [ 654.025155] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.034525] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354034, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.045064] env[61629]: DEBUG nova.network.neutron [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 654.177403] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eebbe5b6-a78a-4cf4-9673-b2f675620fbd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.179742] env[61629]: DEBUG nova.network.neutron [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.188881] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8de42f-3886-4c26-82de-9778b5314e4f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.225688] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5981be43-fe76-485a-868c-cc4d72946422 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.232521] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07be9bdd-d1d3-4855-a2da-2d36ef7aa000 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.246449] env[61629]: DEBUG nova.compute.provider_tree [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 654.373910] env[61629]: DEBUG nova.compute.manager [req-768829c8-f06f-49c4-bab6-39687554b167 req-be6f34f9-25bb-4709-b324-a58b6d94389e service nova] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Received event network-vif-deleted-1b89f1d7-2c1f-40fa-a812-8ab8ced966d1 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 654.537692] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354034, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.655314] env[61629]: DEBUG nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 654.685018] env[61629]: DEBUG nova.virt.hardware [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 654.685257] env[61629]: DEBUG nova.virt.hardware [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 654.685416] env[61629]: DEBUG nova.virt.hardware [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 654.685597] env[61629]: DEBUG nova.virt.hardware [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 654.685742] env[61629]: DEBUG nova.virt.hardware [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 654.685896] env[61629]: DEBUG nova.virt.hardware [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 654.687537] env[61629]: DEBUG nova.virt.hardware [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 654.687537] env[61629]: DEBUG nova.virt.hardware [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 654.687537] env[61629]: DEBUG nova.virt.hardware [None 
req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 654.687676] env[61629]: DEBUG nova.virt.hardware [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 654.687926] env[61629]: DEBUG nova.virt.hardware [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 654.691413] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Releasing lock "refresh_cache-be2db738-cfe9-4720-b348-c7b03f28e96b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.691413] env[61629]: DEBUG nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 654.691413] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 654.691413] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3259d72-a913-49d4-985a-425427232e24 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.693800] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2f20968c-98ae-4bbd-bf61-330e81fa4b1f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.701999] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9def5273-59d6-4bb8-bb83-2a0a730b7420 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.708497] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a412a6-3c57-499c-8d41-c90eeaf5bd1e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.736497] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance be2db738-cfe9-4720-b348-c7b03f28e96b could not be found. 
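The WARNING that closes the entry above, immediately followed by "Instance destroyed" and "Took 0.05 seconds to destroy the instance" in the next entries, shows how the VMware destroy path treats a VM that never reached the backend as already torn down: InstanceNotFound is caught and logged, and the compute manager carries on with network deallocation and claim cleanup. A minimal sketch of that tolerant teardown, assuming it runs inside a Nova tree; the lookup and delete callables are hypothetical stand-ins injected by the caller, not real vmops functions:

import logging

from nova import exception

LOG = logging.getLogger(__name__)


def destroy_backend_vm(session, instance, lookup_vm_ref, unregister_and_delete):
    """Tear down the backend VM, treating a missing VM as already destroyed.

    lookup_vm_ref and unregister_and_delete are injected callables standing in
    for the real vmops helpers.
    """
    try:
        vm_ref = lookup_vm_ref(session, instance.uuid)
        unregister_and_delete(session, vm_ref)
    except exception.InstanceNotFound:
        # The case in this log: port binding failed before the VM was ever
        # created in vCenter, so there is nothing to delete on the backend.
        LOG.warning("Instance %s does not exist on backend", instance.uuid)
    # Either way the caller proceeds to deallocate networking and abort the
    # resource claim, as the surrounding entries show.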
[ 654.736497] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 654.736497] env[61629]: INFO nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Took 0.05 seconds to destroy the instance on the hypervisor. [ 654.736497] env[61629]: DEBUG oslo.service.loopingcall [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 654.737550] env[61629]: ERROR nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ccaa17cb-d02f-4f92-bf54-7140be7e5cd8, please check neutron logs for more information. [ 654.737550] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 654.737550] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 654.737550] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 654.737550] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 654.737550] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 654.737550] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 654.737550] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 654.737550] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 654.737550] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 654.737550] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 654.737550] env[61629]: ERROR nova.compute.manager raise self.value [ 654.737550] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 654.737550] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 654.737550] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 654.737550] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 654.738566] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 654.738566] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 654.738566] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for 
port ccaa17cb-d02f-4f92-bf54-7140be7e5cd8, please check neutron logs for more information. [ 654.738566] env[61629]: ERROR nova.compute.manager [ 654.738566] env[61629]: Traceback (most recent call last): [ 654.738566] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 654.738566] env[61629]: listener.cb(fileno) [ 654.738566] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 654.738566] env[61629]: result = function(*args, **kwargs) [ 654.738566] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 654.738566] env[61629]: return func(*args, **kwargs) [ 654.738566] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 654.738566] env[61629]: raise e [ 654.738566] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 654.738566] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 654.738566] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 654.738566] env[61629]: created_port_ids = self._update_ports_for_instance( [ 654.738566] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 654.738566] env[61629]: with excutils.save_and_reraise_exception(): [ 654.738566] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 654.738566] env[61629]: self.force_reraise() [ 654.738566] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 654.738566] env[61629]: raise self.value [ 654.738566] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 654.738566] env[61629]: updated_port = self._update_port( [ 654.738566] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 654.738566] env[61629]: _ensure_no_port_binding_failure(port) [ 654.738566] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 654.738566] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 654.741339] env[61629]: nova.exception.PortBindingFailed: Binding failed for port ccaa17cb-d02f-4f92-bf54-7140be7e5cd8, please check neutron logs for more information. [ 654.741339] env[61629]: Removing descriptor: 15 [ 654.741339] env[61629]: DEBUG nova.compute.manager [-] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 654.741339] env[61629]: DEBUG nova.network.neutron [-] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 654.741339] env[61629]: ERROR nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ccaa17cb-d02f-4f92-bf54-7140be7e5cd8, please check neutron logs for more information. 
[ 654.741339] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Traceback (most recent call last): [ 654.741339] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 654.741339] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] yield resources [ 654.741339] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 654.741339] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] self.driver.spawn(context, instance, image_meta, [ 654.741339] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 654.741977] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 654.741977] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 654.741977] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] vm_ref = self.build_virtual_machine(instance, [ 654.741977] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 654.741977] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] vif_infos = vmwarevif.get_vif_info(self._session, [ 654.741977] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 654.741977] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] for vif in network_info: [ 654.741977] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 654.741977] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] return self._sync_wrapper(fn, *args, **kwargs) [ 654.741977] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 654.741977] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] self.wait() [ 654.741977] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 654.741977] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] self[:] = self._gt.wait() [ 654.742741] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 654.742741] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] return self._exit_event.wait() [ 654.742741] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 654.742741] env[61629]: ERROR 
nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] result = hub.switch() [ 654.742741] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 654.742741] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] return self.greenlet.switch() [ 654.742741] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 654.742741] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] result = function(*args, **kwargs) [ 654.742741] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 654.742741] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] return func(*args, **kwargs) [ 654.742741] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 654.742741] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] raise e [ 654.742741] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 654.743346] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] nwinfo = self.network_api.allocate_for_instance( [ 654.743346] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 654.743346] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] created_port_ids = self._update_ports_for_instance( [ 654.743346] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 654.743346] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] with excutils.save_and_reraise_exception(): [ 654.743346] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 654.743346] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] self.force_reraise() [ 654.743346] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 654.743346] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] raise self.value [ 654.743346] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 654.743346] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] updated_port = self._update_port( [ 654.743346] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 654.743346] 
env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] _ensure_no_port_binding_failure(port) [ 654.743797] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 654.743797] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] raise exception.PortBindingFailed(port_id=port['id']) [ 654.743797] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] nova.exception.PortBindingFailed: Binding failed for port ccaa17cb-d02f-4f92-bf54-7140be7e5cd8, please check neutron logs for more information. [ 654.743797] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] [ 654.743797] env[61629]: INFO nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Terminating instance [ 654.743797] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquiring lock "refresh_cache-6ba7ca7d-173d-41d3-b523-3548a67397c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.743797] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquired lock "refresh_cache-6ba7ca7d-173d-41d3-b523-3548a67397c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.744177] env[61629]: DEBUG nova.network.neutron [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 654.749152] env[61629]: DEBUG nova.scheduler.client.report [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 654.756506] env[61629]: DEBUG nova.network.neutron [-] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 655.037097] env[61629]: DEBUG oslo_vmware.api [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354034, 'name': PowerOnVM_Task, 'duration_secs': 0.575474} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.037330] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 655.037517] env[61629]: INFO nova.compute.manager [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Took 5.83 seconds to spawn the instance on the hypervisor. [ 655.037691] env[61629]: DEBUG nova.compute.manager [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 655.038522] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e96e34-467f-4083-9a14-d5d09d456f52 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.255176] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.622s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.255868] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 655.258439] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.943s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.264265] env[61629]: INFO nova.compute.claims [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 655.264265] env[61629]: DEBUG nova.network.neutron [-] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.266433] env[61629]: DEBUG nova.network.neutron [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 655.360694] env[61629]: DEBUG nova.network.neutron [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.556406] env[61629]: INFO nova.compute.manager [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Took 26.54 seconds to build instance. [ 655.767330] env[61629]: INFO nova.compute.manager [-] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Took 1.03 seconds to deallocate network for instance. [ 655.772355] env[61629]: DEBUG nova.compute.utils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 655.772916] env[61629]: DEBUG nova.compute.claims [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 655.773363] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.778134] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 655.778134] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 655.816073] env[61629]: DEBUG nova.policy [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a53b3957c45f4788932118a04e52b7c2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '433bada53f7f4020b3cd034c9645c776', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 655.864546] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Releasing lock "refresh_cache-6ba7ca7d-173d-41d3-b523-3548a67397c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 655.865410] env[61629]: DEBUG nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 655.865410] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 655.865541] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-265f64cf-a5ed-4a9f-9da5-5a08392aa313 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.874945] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3097e5-dd0d-48d5-bfdc-2d257544fde8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.898774] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6ba7ca7d-173d-41d3-b523-3548a67397c4 could not be found. 
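Both failed builds in this section bottom out in the same place: nova/network/neutron.py line 294, where Nova inspects the port Neutron has just updated and raises PortBindingFailed, which then unwinds through _allocate_network_async and the spawn path shown in the tracebacks. A sketch of that check, consistent with the raise the tracebacks display; the binding:vif_type comparison is an assumption about the exact condition, which the traceback itself does not show:

from nova import exception

# Neutron reports a failed binding through this vif_type value (assumed here).
VIF_TYPE_BINDING_FAILED = 'binding_failed'


def ensure_no_port_binding_failure(port):
    """Raise PortBindingFailed when Neutron reports the port binding failed."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        # This is the raise visible in the tracebacks; it aborts the build,
        # after which the instance is destroyed and its claim rolled back.
        raise exception.PortBindingFailed(port_id=port['id'])

The check has already fired above for port ccaa17cb-d02f-4f92-bf54-7140be7e5cd8 and fires again below for port 15d38b73-42a2-4d0b-9550-84f7c88392ee; each failure is followed by the deallocation, destroy, and claim-abort entries logged around it.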
[ 655.898994] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 655.899192] env[61629]: INFO nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Took 0.03 seconds to destroy the instance on the hypervisor. [ 655.899434] env[61629]: DEBUG oslo.service.loopingcall [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 655.899658] env[61629]: DEBUG nova.compute.manager [-] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 655.899767] env[61629]: DEBUG nova.network.neutron [-] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 655.915277] env[61629]: DEBUG nova.network.neutron [-] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 656.058569] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0c21891b-2e26-4429-af17-1a01d6262f00 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lock "2315bd37-6151-42d7-8b54-9ee367be0ed1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.096s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 656.107340] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Successfully created port: 15d38b73-42a2-4d0b-9550-84f7c88392ee {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 656.126447] env[61629]: INFO nova.compute.manager [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Rebuilding instance [ 656.174033] env[61629]: DEBUG nova.compute.manager [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 656.174269] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d469e8-c474-4548-aa1b-e10b14ed84fc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.275674] env[61629]: DEBUG nova.compute.manager [None 
req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 656.418257] env[61629]: DEBUG nova.network.neutron [-] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.428840] env[61629]: DEBUG nova.compute.manager [req-6da158d8-3989-4991-95bb-f17607fe50b8 req-b49e76c0-4900-4573-9349-380f862eab1d service nova] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Received event network-changed-ccaa17cb-d02f-4f92-bf54-7140be7e5cd8 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 656.429047] env[61629]: DEBUG nova.compute.manager [req-6da158d8-3989-4991-95bb-f17607fe50b8 req-b49e76c0-4900-4573-9349-380f862eab1d service nova] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Refreshing instance network info cache due to event network-changed-ccaa17cb-d02f-4f92-bf54-7140be7e5cd8. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 656.429270] env[61629]: DEBUG oslo_concurrency.lockutils [req-6da158d8-3989-4991-95bb-f17607fe50b8 req-b49e76c0-4900-4573-9349-380f862eab1d service nova] Acquiring lock "refresh_cache-6ba7ca7d-173d-41d3-b523-3548a67397c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 656.429429] env[61629]: DEBUG oslo_concurrency.lockutils [req-6da158d8-3989-4991-95bb-f17607fe50b8 req-b49e76c0-4900-4573-9349-380f862eab1d service nova] Acquired lock "refresh_cache-6ba7ca7d-173d-41d3-b523-3548a67397c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.429620] env[61629]: DEBUG nova.network.neutron [req-6da158d8-3989-4991-95bb-f17607fe50b8 req-b49e76c0-4900-4573-9349-380f862eab1d service nova] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Refreshing network info cache for port ccaa17cb-d02f-4f92-bf54-7140be7e5cd8 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 656.561737] env[61629]: DEBUG nova.compute.manager [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Starting instance... 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 656.685483] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 656.686896] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d02c7a89-8fa2-4fdd-b2b7-e651e8518d44 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.693957] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 656.693957] env[61629]: value = "task-1354036" [ 656.693957] env[61629]: _type = "Task" [ 656.693957] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.709752] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354036, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.713255] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4fbfed-ffca-4d49-a1d1-8eec04af34f0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.719888] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f387f45-d51d-4e6a-ad02-1d88597aa635 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.752104] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b52b2b-8c9f-422c-995f-8eca746d6327 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.759433] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd534220-ad56-4ab1-b326-808a9eb08d2a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.775262] env[61629]: DEBUG nova.compute.provider_tree [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.921637] env[61629]: INFO nova.compute.manager [-] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Took 1.02 seconds to deallocate network for instance. 
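The "Waiting for the task" / "progress is N%" / "completed successfully" sequences around the Rename_Task, PowerOnVM_Task, and PowerOffVM_Task entries above (and the DeleteDatastoreFile_Task below) all come from oslo.vmware polling a vCenter task until it leaves the running states. A simplified sketch of that loop, not the actual _poll_task implementation; get_task_info stands in for the property-collector read that fetches the task's TaskInfo:

import time


def wait_for_vcenter_task(get_task_info, interval=0.5):
    """Poll a vCenter task until it succeeds or fails.

    get_task_info is an injected callable returning an object with .state,
    .progress and .error_msg, mirroring the TaskInfo fields these DEBUG
    entries report on.
    """
    while True:
        info = get_task_info()
        if info.state == 'success':
            return info                        # logged as "completed successfully"
        if info.state == 'error':
            raise RuntimeError(info.error_msg)
        # 'queued' / 'running': this is where "progress is N%" gets logged.
        time.sleep(interval)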
[ 656.924218] env[61629]: DEBUG nova.compute.claims [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 656.924400] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.948910] env[61629]: DEBUG nova.network.neutron [req-6da158d8-3989-4991-95bb-f17607fe50b8 req-b49e76c0-4900-4573-9349-380f862eab1d service nova] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 657.039144] env[61629]: DEBUG nova.network.neutron [req-6da158d8-3989-4991-95bb-f17607fe50b8 req-b49e76c0-4900-4573-9349-380f862eab1d service nova] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.083844] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.095939] env[61629]: ERROR nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 15d38b73-42a2-4d0b-9550-84f7c88392ee, please check neutron logs for more information. 
[ 657.095939] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 657.095939] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 657.095939] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 657.095939] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 657.095939] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 657.095939] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 657.095939] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 657.095939] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 657.095939] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 657.095939] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 657.095939] env[61629]: ERROR nova.compute.manager raise self.value [ 657.095939] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 657.095939] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 657.095939] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 657.095939] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 657.096402] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 657.096402] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 657.096402] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 15d38b73-42a2-4d0b-9550-84f7c88392ee, please check neutron logs for more information. 
[ 657.096402] env[61629]: ERROR nova.compute.manager [ 657.096402] env[61629]: Traceback (most recent call last): [ 657.096402] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 657.096402] env[61629]: listener.cb(fileno) [ 657.096402] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 657.096402] env[61629]: result = function(*args, **kwargs) [ 657.096402] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 657.096402] env[61629]: return func(*args, **kwargs) [ 657.096402] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 657.096402] env[61629]: raise e [ 657.096402] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 657.096402] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 657.096402] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 657.096402] env[61629]: created_port_ids = self._update_ports_for_instance( [ 657.096402] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 657.096402] env[61629]: with excutils.save_and_reraise_exception(): [ 657.096402] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 657.096402] env[61629]: self.force_reraise() [ 657.096402] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 657.096402] env[61629]: raise self.value [ 657.096402] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 657.096402] env[61629]: updated_port = self._update_port( [ 657.096402] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 657.096402] env[61629]: _ensure_no_port_binding_failure(port) [ 657.096402] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 657.096402] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 657.097201] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 15d38b73-42a2-4d0b-9550-84f7c88392ee, please check neutron logs for more information. [ 657.097201] env[61629]: Removing descriptor: 15 [ 657.204323] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354036, 'name': PowerOffVM_Task, 'duration_secs': 0.123024} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.204597] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 657.204810] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 657.205642] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877d0c4b-49ac-4a23-8de1-57621d48f053 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.212079] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 657.212303] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-baf658a7-1e1a-4b87-b247-a2affc64a701 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.236031] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 657.236031] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 657.236209] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Deleting the datastore file [datastore2] 2315bd37-6151-42d7-8b54-9ee367be0ed1 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 657.236465] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-421f5a26-b478-454d-8018-2341b9c0f9fb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.243085] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 657.243085] env[61629]: value = "task-1354039" [ 657.243085] env[61629]: _type = "Task" [ 657.243085] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.250853] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354039, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.280118] env[61629]: DEBUG nova.scheduler.client.report [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 657.292030] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 657.319595] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 657.320074] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 657.320215] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 657.320478] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Flavor pref 0:0:0 
{{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 657.320684] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 657.321373] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 657.321373] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 657.321518] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 657.322225] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 657.322493] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 657.322749] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 657.323961] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f980d03-6e90-47fb-bd9a-dd7509d2978c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.334766] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c8ea9c-c9d9-42c0-b111-a94dd405da12 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.352081] env[61629]: ERROR nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 15d38b73-42a2-4d0b-9550-84f7c88392ee, please check neutron logs for more information. 
[ 657.352081] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Traceback (most recent call last): [ 657.352081] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 657.352081] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] yield resources [ 657.352081] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 657.352081] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] self.driver.spawn(context, instance, image_meta, [ 657.352081] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 657.352081] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 657.352081] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 657.352081] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] vm_ref = self.build_virtual_machine(instance, [ 657.352081] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 657.352627] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] vif_infos = vmwarevif.get_vif_info(self._session, [ 657.352627] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 657.352627] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] for vif in network_info: [ 657.352627] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 657.352627] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] return self._sync_wrapper(fn, *args, **kwargs) [ 657.352627] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 657.352627] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] self.wait() [ 657.352627] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 657.352627] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] self[:] = self._gt.wait() [ 657.352627] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 657.352627] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] return self._exit_event.wait() [ 657.352627] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 657.352627] env[61629]: ERROR 
nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] current.throw(*self._exc) [ 657.353391] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 657.353391] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] result = function(*args, **kwargs) [ 657.353391] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 657.353391] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] return func(*args, **kwargs) [ 657.353391] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 657.353391] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] raise e [ 657.353391] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 657.353391] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] nwinfo = self.network_api.allocate_for_instance( [ 657.353391] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 657.353391] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] created_port_ids = self._update_ports_for_instance( [ 657.353391] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 657.353391] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] with excutils.save_and_reraise_exception(): [ 657.353391] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 657.354402] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] self.force_reraise() [ 657.354402] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 657.354402] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] raise self.value [ 657.354402] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 657.354402] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] updated_port = self._update_port( [ 657.354402] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 657.354402] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] _ensure_no_port_binding_failure(port) [ 657.354402] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
657.354402] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] raise exception.PortBindingFailed(port_id=port['id']) [ 657.354402] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] nova.exception.PortBindingFailed: Binding failed for port 15d38b73-42a2-4d0b-9550-84f7c88392ee, please check neutron logs for more information. [ 657.354402] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] [ 657.354402] env[61629]: INFO nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Terminating instance [ 657.355072] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquiring lock "refresh_cache-26366e41-de20-432b-a37e-5abb07c4ff8d" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.355072] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquired lock "refresh_cache-26366e41-de20-432b-a37e-5abb07c4ff8d" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.355072] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 657.542202] env[61629]: DEBUG oslo_concurrency.lockutils [req-6da158d8-3989-4991-95bb-f17607fe50b8 req-b49e76c0-4900-4573-9349-380f862eab1d service nova] Releasing lock "refresh_cache-6ba7ca7d-173d-41d3-b523-3548a67397c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 657.542482] env[61629]: DEBUG nova.compute.manager [req-6da158d8-3989-4991-95bb-f17607fe50b8 req-b49e76c0-4900-4573-9349-380f862eab1d service nova] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Received event network-vif-deleted-ccaa17cb-d02f-4f92-bf54-7140be7e5cd8 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 657.754043] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354039, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099731} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.754227] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 657.754418] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 657.754626] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 657.784721] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.526s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.785225] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 657.790015] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.365s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.790015] env[61629]: INFO nova.compute.claims [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 657.877015] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 657.974687] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.299355] env[61629]: DEBUG nova.compute.utils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 658.300680] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 658.300846] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 658.350980] env[61629]: DEBUG nova.policy [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a53b3957c45f4788932118a04e52b7c2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '433bada53f7f4020b3cd034c9645c776', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 658.455371] env[61629]: DEBUG nova.compute.manager [req-13db7014-a75c-466f-89c8-171b7bb7e1cb req-c7764914-b398-4cbf-b523-8318305bf3ae service nova] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Received event network-changed-15d38b73-42a2-4d0b-9550-84f7c88392ee {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 658.455446] env[61629]: DEBUG nova.compute.manager [req-13db7014-a75c-466f-89c8-171b7bb7e1cb req-c7764914-b398-4cbf-b523-8318305bf3ae service nova] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Refreshing instance network info cache due to event network-changed-15d38b73-42a2-4d0b-9550-84f7c88392ee. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 658.455642] env[61629]: DEBUG oslo_concurrency.lockutils [req-13db7014-a75c-466f-89c8-171b7bb7e1cb req-c7764914-b398-4cbf-b523-8318305bf3ae service nova] Acquiring lock "refresh_cache-26366e41-de20-432b-a37e-5abb07c4ff8d" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.480234] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Releasing lock "refresh_cache-26366e41-de20-432b-a37e-5abb07c4ff8d" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.480234] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 658.480234] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 658.480234] env[61629]: DEBUG oslo_concurrency.lockutils [req-13db7014-a75c-466f-89c8-171b7bb7e1cb req-c7764914-b398-4cbf-b523-8318305bf3ae service nova] Acquired lock "refresh_cache-26366e41-de20-432b-a37e-5abb07c4ff8d" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.480234] env[61629]: DEBUG nova.network.neutron [req-13db7014-a75c-466f-89c8-171b7bb7e1cb req-c7764914-b398-4cbf-b523-8318305bf3ae service nova] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Refreshing network info cache for port 15d38b73-42a2-4d0b-9550-84f7c88392ee {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 658.480409] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4745936a-d866-4283-85b1-51314a0a786e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.490718] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e441fc-e716-42e4-a863-46f2223c3291 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.518573] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 26366e41-de20-432b-a37e-5abb07c4ff8d could not be found. 
[ 658.518573] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 658.518573] env[61629]: INFO nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 658.518573] env[61629]: DEBUG oslo.service.loopingcall [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 658.518573] env[61629]: DEBUG nova.compute.manager [-] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 658.518573] env[61629]: DEBUG nova.network.neutron [-] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 658.534800] env[61629]: DEBUG nova.network.neutron [-] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 658.641450] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Successfully created port: 70469a0c-33ae-40da-a372-fd6053023643 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 658.792390] env[61629]: DEBUG nova.virt.hardware [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 658.792390] env[61629]: DEBUG nova.virt.hardware [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 
658.792390] env[61629]: DEBUG nova.virt.hardware [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 658.792390] env[61629]: DEBUG nova.virt.hardware [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 658.792627] env[61629]: DEBUG nova.virt.hardware [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 658.792627] env[61629]: DEBUG nova.virt.hardware [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 658.792627] env[61629]: DEBUG nova.virt.hardware [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 658.792732] env[61629]: DEBUG nova.virt.hardware [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 658.792928] env[61629]: DEBUG nova.virt.hardware [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 658.793416] env[61629]: DEBUG nova.virt.hardware [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 658.793631] env[61629]: DEBUG nova.virt.hardware [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 658.794532] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebcb895-1a4d-4bc9-ba8e-3fc7cff107c8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.807427] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 658.814336] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e86d2afd-4e92-4ab9-8b28-c3ebe80860bc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.829540] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Instance VIF info [] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 658.835732] env[61629]: DEBUG oslo.service.loopingcall [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 658.835970] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 658.836199] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4bcab94-ba64-40bf-b45a-72d4575d8d4e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.862807] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 658.862807] env[61629]: value = "task-1354040" [ 658.862807] env[61629]: _type = "Task" [ 658.862807] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.875339] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354040, 'name': CreateVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.007553] env[61629]: DEBUG nova.network.neutron [req-13db7014-a75c-466f-89c8-171b7bb7e1cb req-c7764914-b398-4cbf-b523-8318305bf3ae service nova] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 659.036079] env[61629]: DEBUG nova.network.neutron [-] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.125662] env[61629]: DEBUG nova.network.neutron [req-13db7014-a75c-466f-89c8-171b7bb7e1cb req-c7764914-b398-4cbf-b523-8318305bf3ae service nova] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.297056] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2cb4411-a6c8-4b16-85b1-7ad0cec2277d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.305144] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc972e9-da06-496b-916a-08543247f513 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.338738] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144d0aa3-30ed-453b-ab18-984d890ebe22 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.346651] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc6fd31-9be2-41da-8b51-3f8ea239f78f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.359659] env[61629]: DEBUG nova.compute.provider_tree [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.371139] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354040, 'name': CreateVM_Task, 'duration_secs': 0.245044} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.372404] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 659.372862] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.373055] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.373384] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 659.373866] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a1e0de0-e136-49a9-ab7f-47502af3c715 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.379518] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 659.379518] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]520efb55-b746-b865-3265-1b47b9ebe4c6" [ 659.379518] env[61629]: _type = "Task" [ 659.379518] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.388237] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]520efb55-b746-b865-3265-1b47b9ebe4c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.542144] env[61629]: INFO nova.compute.manager [-] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Took 1.02 seconds to deallocate network for instance. 
[ 659.544823] env[61629]: DEBUG nova.compute.claims [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 659.545011] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.629129] env[61629]: DEBUG oslo_concurrency.lockutils [req-13db7014-a75c-466f-89c8-171b7bb7e1cb req-c7764914-b398-4cbf-b523-8318305bf3ae service nova] Releasing lock "refresh_cache-26366e41-de20-432b-a37e-5abb07c4ff8d" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 659.629416] env[61629]: DEBUG nova.compute.manager [req-13db7014-a75c-466f-89c8-171b7bb7e1cb req-c7764914-b398-4cbf-b523-8318305bf3ae service nova] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Received event network-vif-deleted-15d38b73-42a2-4d0b-9550-84f7c88392ee {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 659.827630] env[61629]: ERROR nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 70469a0c-33ae-40da-a372-fd6053023643, please check neutron logs for more information. 
[ 659.827630] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 659.827630] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 659.827630] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 659.827630] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 659.827630] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 659.827630] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 659.827630] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 659.827630] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 659.827630] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 659.827630] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 659.827630] env[61629]: ERROR nova.compute.manager raise self.value [ 659.827630] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 659.827630] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 659.827630] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 659.827630] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 659.828308] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 659.828308] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 659.828308] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 70469a0c-33ae-40da-a372-fd6053023643, please check neutron logs for more information. 
[ 659.828308] env[61629]: ERROR nova.compute.manager [ 659.828308] env[61629]: Traceback (most recent call last): [ 659.828308] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 659.828308] env[61629]: listener.cb(fileno) [ 659.828308] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 659.828308] env[61629]: result = function(*args, **kwargs) [ 659.828308] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 659.828308] env[61629]: return func(*args, **kwargs) [ 659.828308] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 659.828308] env[61629]: raise e [ 659.828308] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 659.828308] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 659.828308] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 659.828308] env[61629]: created_port_ids = self._update_ports_for_instance( [ 659.828308] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 659.828308] env[61629]: with excutils.save_and_reraise_exception(): [ 659.828308] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 659.828308] env[61629]: self.force_reraise() [ 659.828308] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 659.828308] env[61629]: raise self.value [ 659.828308] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 659.828308] env[61629]: updated_port = self._update_port( [ 659.828308] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 659.828308] env[61629]: _ensure_no_port_binding_failure(port) [ 659.828308] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 659.828308] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 659.829477] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 70469a0c-33ae-40da-a372-fd6053023643, please check neutron logs for more information. [ 659.829477] env[61629]: Removing descriptor: 15 [ 659.843368] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 659.862352] env[61629]: DEBUG nova.scheduler.client.report [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 659.873329] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 659.873724] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 659.873724] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 659.873908] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 659.874045] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 659.874232] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 659.874445] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 659.874600] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 659.874763] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 659.874924] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 659.875117] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 659.875973] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-087af32f-f843-47ef-bd8a-14e5571b4a5c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.886469] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7eae9ee-6acd-4ae0-9617-08387070f301 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.895270] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]520efb55-b746-b865-3265-1b47b9ebe4c6, 'name': SearchDatastore_Task, 'duration_secs': 0.009148} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.896139] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 659.896432] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 659.896676] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.896824] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.897027] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 659.907055] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a05fdd5-f19f-4fee-a5e8-30f595d33da9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.909451] env[61629]: ERROR nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 70469a0c-33ae-40da-a372-fd6053023643, please check neutron logs for more information. 
[ 659.909451] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Traceback (most recent call last): [ 659.909451] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 659.909451] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] yield resources [ 659.909451] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 659.909451] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] self.driver.spawn(context, instance, image_meta, [ 659.909451] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 659.909451] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 659.909451] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 659.909451] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] vm_ref = self.build_virtual_machine(instance, [ 659.909451] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 659.909834] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] vif_infos = vmwarevif.get_vif_info(self._session, [ 659.909834] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 659.909834] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] for vif in network_info: [ 659.909834] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 659.909834] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] return self._sync_wrapper(fn, *args, **kwargs) [ 659.909834] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 659.909834] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] self.wait() [ 659.909834] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 659.909834] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] self[:] = self._gt.wait() [ 659.909834] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 659.909834] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] return self._exit_event.wait() [ 659.909834] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 659.909834] env[61629]: ERROR 
nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] current.throw(*self._exc) [ 659.910238] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 659.910238] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] result = function(*args, **kwargs) [ 659.910238] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 659.910238] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] return func(*args, **kwargs) [ 659.910238] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 659.910238] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] raise e [ 659.910238] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 659.910238] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] nwinfo = self.network_api.allocate_for_instance( [ 659.910238] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 659.910238] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] created_port_ids = self._update_ports_for_instance( [ 659.910238] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 659.910238] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] with excutils.save_and_reraise_exception(): [ 659.910238] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 659.910653] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] self.force_reraise() [ 659.910653] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 659.910653] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] raise self.value [ 659.910653] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 659.910653] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] updated_port = self._update_port( [ 659.910653] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 659.910653] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] _ensure_no_port_binding_failure(port) [ 659.910653] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
659.910653] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] raise exception.PortBindingFailed(port_id=port['id']) [ 659.910653] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] nova.exception.PortBindingFailed: Binding failed for port 70469a0c-33ae-40da-a372-fd6053023643, please check neutron logs for more information. [ 659.910653] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] [ 659.910653] env[61629]: INFO nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Terminating instance [ 659.911957] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquiring lock "refresh_cache-5670d64c-bddc-4b4a-bdf0-2b039be5e49e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.912153] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquired lock "refresh_cache-5670d64c-bddc-4b4a-bdf0-2b039be5e49e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.912322] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 659.917729] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 659.917904] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 659.918626] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9b938dd-8d8a-46a7-aa9b-20fa3f00d321 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.924184] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 659.924184] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52cd8dab-357f-04b7-ae73-6b4d66493bd2" [ 659.924184] env[61629]: _type = "Task" [ 659.924184] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.933057] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52cd8dab-357f-04b7-ae73-6b4d66493bd2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.369387] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.581s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.373019] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 660.373019] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.947s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 660.374343] env[61629]: INFO nova.compute.claims [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 660.429450] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 660.435322] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52cd8dab-357f-04b7-ae73-6b4d66493bd2, 'name': SearchDatastore_Task, 'duration_secs': 0.00803} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.436132] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e8ed617-cab2-4b49-935b-39ee3dbaa3a2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.440799] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 660.440799] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52d05c6b-78da-4c97-82c7-3398556d25d2" [ 660.440799] env[61629]: _type = "Task" [ 660.440799] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.448145] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52d05c6b-78da-4c97-82c7-3398556d25d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.480320] env[61629]: DEBUG nova.compute.manager [req-3ea3cfb9-1753-461c-b518-c674878c12b4 req-4cb10104-2932-4d95-9a9c-83ac939763ca service nova] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Received event network-changed-70469a0c-33ae-40da-a372-fd6053023643 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 660.480516] env[61629]: DEBUG nova.compute.manager [req-3ea3cfb9-1753-461c-b518-c674878c12b4 req-4cb10104-2932-4d95-9a9c-83ac939763ca service nova] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Refreshing instance network info cache due to event network-changed-70469a0c-33ae-40da-a372-fd6053023643. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 660.480704] env[61629]: DEBUG oslo_concurrency.lockutils [req-3ea3cfb9-1753-461c-b518-c674878c12b4 req-4cb10104-2932-4d95-9a9c-83ac939763ca service nova] Acquiring lock "refresh_cache-5670d64c-bddc-4b4a-bdf0-2b039be5e49e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 660.533743] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.878303] env[61629]: DEBUG nova.compute.utils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 660.882310] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 660.882518] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 660.931271] env[61629]: DEBUG nova.policy [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a53b3957c45f4788932118a04e52b7c2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '433bada53f7f4020b3cd034c9645c776', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 660.957640] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52d05c6b-78da-4c97-82c7-3398556d25d2, 'name': SearchDatastore_Task, 'duration_secs': 0.027866} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.958071] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.958478] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 2315bd37-6151-42d7-8b54-9ee367be0ed1/2315bd37-6151-42d7-8b54-9ee367be0ed1.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 660.958940] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-62925be9-5ec9-4165-ab05-8d1e92a1402e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.970948] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 660.970948] env[61629]: value = "task-1354042" [ 660.970948] env[61629]: _type = "Task" [ 660.970948] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.983227] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354042, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.036459] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Releasing lock "refresh_cache-5670d64c-bddc-4b4a-bdf0-2b039be5e49e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 661.037217] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 661.037567] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 661.038093] env[61629]: DEBUG oslo_concurrency.lockutils [req-3ea3cfb9-1753-461c-b518-c674878c12b4 req-4cb10104-2932-4d95-9a9c-83ac939763ca service nova] Acquired lock "refresh_cache-5670d64c-bddc-4b4a-bdf0-2b039be5e49e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.038382] env[61629]: DEBUG nova.network.neutron [req-3ea3cfb9-1753-461c-b518-c674878c12b4 req-4cb10104-2932-4d95-9a9c-83ac939763ca service nova] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Refreshing network info cache for port 70469a0c-33ae-40da-a372-fd6053023643 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 661.040134] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dbb830d9-7974-4dfd-bf41-a10b8770e064 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.055233] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e85fb49-801f-4edf-8896-4d3574dde363 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.094160] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5670d64c-bddc-4b4a-bdf0-2b039be5e49e could not be found. 
[ 661.094742] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 661.095095] env[61629]: INFO nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Took 0.06 seconds to destroy the instance on the hypervisor. [ 661.095534] env[61629]: DEBUG oslo.service.loopingcall [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 661.095949] env[61629]: DEBUG nova.compute.manager [-] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 661.096153] env[61629]: DEBUG nova.network.neutron [-] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 661.113852] env[61629]: DEBUG nova.network.neutron [-] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 661.302079] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Successfully created port: ba3af36d-c7aa-42c6-8578-0a1e6010675e {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 661.383404] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 661.491983] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354042, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.572199] env[61629]: DEBUG nova.network.neutron [req-3ea3cfb9-1753-461c-b518-c674878c12b4 req-4cb10104-2932-4d95-9a9c-83ac939763ca service nova] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 661.622593] env[61629]: DEBUG nova.network.neutron [-] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.672708] env[61629]: DEBUG nova.network.neutron [req-3ea3cfb9-1753-461c-b518-c674878c12b4 req-4cb10104-2932-4d95-9a9c-83ac939763ca service nova] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.985308] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354042, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.567639} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.985619] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 2315bd37-6151-42d7-8b54-9ee367be0ed1/2315bd37-6151-42d7-8b54-9ee367be0ed1.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 661.985901] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 661.986361] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8f0e70c6-9f7b-44a4-8928-cc5a79bff016 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.994567] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 661.994567] env[61629]: value = "task-1354044" [ 661.994567] env[61629]: _type = "Task" [ 661.994567] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.007136] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354044, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.018269] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f836b907-ce14-48e7-8ed6-61dbc055c0ff {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.026192] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e124db-1d73-421b-b5ea-db6d25560d69 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.060419] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073e83af-e374-4274-abb3-8e7661428185 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.069635] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba91ef2d-1467-4ded-8293-6da04413a80b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.085669] env[61629]: DEBUG nova.compute.provider_tree [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 662.127141] env[61629]: INFO nova.compute.manager [-] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Took 1.03 seconds to deallocate network for instance. [ 662.129385] env[61629]: DEBUG nova.compute.claims [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 662.129563] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.175793] env[61629]: DEBUG oslo_concurrency.lockutils [req-3ea3cfb9-1753-461c-b518-c674878c12b4 req-4cb10104-2932-4d95-9a9c-83ac939763ca service nova] Releasing lock "refresh_cache-5670d64c-bddc-4b4a-bdf0-2b039be5e49e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 662.176115] env[61629]: DEBUG nova.compute.manager [req-3ea3cfb9-1753-461c-b518-c674878c12b4 req-4cb10104-2932-4d95-9a9c-83ac939763ca service nova] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Received event network-vif-deleted-70469a0c-33ae-40da-a372-fd6053023643 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 662.386829] env[61629]: ERROR nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ba3af36d-c7aa-42c6-8578-0a1e6010675e, please check neutron logs for more 
information. [ 662.386829] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 662.386829] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 662.386829] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 662.386829] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 662.386829] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 662.386829] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 662.386829] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 662.386829] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 662.386829] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 662.386829] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 662.386829] env[61629]: ERROR nova.compute.manager raise self.value [ 662.386829] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 662.386829] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 662.386829] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 662.386829] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 662.387683] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 662.387683] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 662.387683] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ba3af36d-c7aa-42c6-8578-0a1e6010675e, please check neutron logs for more information. 
[ 662.387683] env[61629]: ERROR nova.compute.manager [ 662.387683] env[61629]: Traceback (most recent call last): [ 662.387683] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 662.387683] env[61629]: listener.cb(fileno) [ 662.387683] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 662.387683] env[61629]: result = function(*args, **kwargs) [ 662.387683] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 662.387683] env[61629]: return func(*args, **kwargs) [ 662.387683] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 662.387683] env[61629]: raise e [ 662.387683] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 662.387683] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 662.387683] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 662.387683] env[61629]: created_port_ids = self._update_ports_for_instance( [ 662.387683] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 662.387683] env[61629]: with excutils.save_and_reraise_exception(): [ 662.387683] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 662.387683] env[61629]: self.force_reraise() [ 662.387683] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 662.387683] env[61629]: raise self.value [ 662.387683] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 662.387683] env[61629]: updated_port = self._update_port( [ 662.387683] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 662.387683] env[61629]: _ensure_no_port_binding_failure(port) [ 662.387683] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 662.387683] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 662.388720] env[61629]: nova.exception.PortBindingFailed: Binding failed for port ba3af36d-c7aa-42c6-8578-0a1e6010675e, please check neutron logs for more information. [ 662.388720] env[61629]: Removing descriptor: 15 [ 662.403545] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 662.431993] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 662.432421] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 662.432682] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 662.432984] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 662.433264] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 662.433528] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 662.433839] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 662.434115] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
662.434389] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 662.435174] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 662.435483] env[61629]: DEBUG nova.virt.hardware [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 662.436472] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c4ac56-010d-44b5-b00d-a60143e6b390 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.446676] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0992fbbe-1705-4fed-8605-f6d0ed60a04c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.461703] env[61629]: ERROR nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ba3af36d-c7aa-42c6-8578-0a1e6010675e, please check neutron logs for more information. 
[ 662.461703] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Traceback (most recent call last): [ 662.461703] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 662.461703] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] yield resources [ 662.461703] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 662.461703] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] self.driver.spawn(context, instance, image_meta, [ 662.461703] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 662.461703] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] self._vmops.spawn(context, instance, image_meta, injected_files, [ 662.461703] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 662.461703] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] vm_ref = self.build_virtual_machine(instance, [ 662.461703] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 662.462102] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] vif_infos = vmwarevif.get_vif_info(self._session, [ 662.462102] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 662.462102] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] for vif in network_info: [ 662.462102] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 662.462102] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] return self._sync_wrapper(fn, *args, **kwargs) [ 662.462102] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 662.462102] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] self.wait() [ 662.462102] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 662.462102] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] self[:] = self._gt.wait() [ 662.462102] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 662.462102] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] return self._exit_event.wait() [ 662.462102] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 662.462102] env[61629]: ERROR 
nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] current.throw(*self._exc) [ 662.462416] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 662.462416] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] result = function(*args, **kwargs) [ 662.462416] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 662.462416] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] return func(*args, **kwargs) [ 662.462416] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 662.462416] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] raise e [ 662.462416] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 662.462416] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] nwinfo = self.network_api.allocate_for_instance( [ 662.462416] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 662.462416] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] created_port_ids = self._update_ports_for_instance( [ 662.462416] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 662.462416] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] with excutils.save_and_reraise_exception(): [ 662.462416] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 662.462742] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] self.force_reraise() [ 662.462742] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 662.462742] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] raise self.value [ 662.462742] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 662.462742] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] updated_port = self._update_port( [ 662.462742] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 662.462742] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] _ensure_no_port_binding_failure(port) [ 662.462742] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
662.462742] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] raise exception.PortBindingFailed(port_id=port['id']) [ 662.462742] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] nova.exception.PortBindingFailed: Binding failed for port ba3af36d-c7aa-42c6-8578-0a1e6010675e, please check neutron logs for more information. [ 662.462742] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] [ 662.462742] env[61629]: INFO nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Terminating instance [ 662.464120] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquiring lock "refresh_cache-67534b42-bfab-49a0-922d-8a79a13995db" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 662.464307] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquired lock "refresh_cache-67534b42-bfab-49a0-922d-8a79a13995db" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.464473] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 662.505443] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354044, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.203792} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.506571] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 662.507803] env[61629]: DEBUG nova.compute.manager [req-9fc04c98-f416-45e8-b90a-1d7bc5aeee40 req-7e84d495-5f69-4948-9f93-6d7491bb1c7e service nova] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Received event network-changed-ba3af36d-c7aa-42c6-8578-0a1e6010675e {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 662.507953] env[61629]: DEBUG nova.compute.manager [req-9fc04c98-f416-45e8-b90a-1d7bc5aeee40 req-7e84d495-5f69-4948-9f93-6d7491bb1c7e service nova] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Refreshing instance network info cache due to event network-changed-ba3af36d-c7aa-42c6-8578-0a1e6010675e. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 662.508276] env[61629]: DEBUG oslo_concurrency.lockutils [req-9fc04c98-f416-45e8-b90a-1d7bc5aeee40 req-7e84d495-5f69-4948-9f93-6d7491bb1c7e service nova] Acquiring lock "refresh_cache-67534b42-bfab-49a0-922d-8a79a13995db" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 662.508969] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a30556-843f-4ef3-8ae4-515912c2e769 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.530108] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Reconfiguring VM instance instance-0000001a to attach disk [datastore2] 2315bd37-6151-42d7-8b54-9ee367be0ed1/2315bd37-6151-42d7-8b54-9ee367be0ed1.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 662.530585] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6a0cbcb-b877-45f3-96d3-30516193920b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.551973] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 662.551973] env[61629]: value = "task-1354045" [ 662.551973] env[61629]: _type = "Task" [ 662.551973] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.561663] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354045, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.589313] env[61629]: DEBUG nova.scheduler.client.report [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 662.980742] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.062495] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354045, 'name': ReconfigVM_Task, 'duration_secs': 0.274872} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.062495] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Reconfigured VM instance instance-0000001a to attach disk [datastore2] 2315bd37-6151-42d7-8b54-9ee367be0ed1/2315bd37-6151-42d7-8b54-9ee367be0ed1.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 663.062495] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b8b5e4c0-b6d0-4d9d-bc02-5b0b306b783d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.070071] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 663.070071] env[61629]: value = "task-1354046" [ 663.070071] env[61629]: _type = "Task" [ 663.070071] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.078878] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354046, 'name': Rename_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.094678] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.722s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 663.095250] env[61629]: DEBUG nova.compute.manager [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 663.098616] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.099577] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 18.929s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.099775] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 663.100018] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61629) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 663.100903] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.200s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.105373] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92385ee4-d18f-49ee-a049-8126bebc894e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.115354] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e345c9bb-374a-46cb-9d84-5b1b06cbcfad {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.131178] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de298f04-365b-4cdf-ba42-68dffebfd116 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.146111] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4c3c3b-87f7-4479-96ba-d1d9cc2da096 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.180708] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181515MB free_disk=151GB free_vcpus=48 pci_devices=None {{(pid=61629) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 663.180708] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.580382] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354046, 'name': Rename_Task, 'duration_secs': 0.138193} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.580657] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 663.580892] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8e1f174b-fd5a-4777-b86b-79dc01f5dfad {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.587702] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 663.587702] env[61629]: value = "task-1354048" [ 663.587702] env[61629]: _type = "Task" [ 663.587702] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.595450] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354048, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.607882] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Releasing lock "refresh_cache-67534b42-bfab-49a0-922d-8a79a13995db" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.608321] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 663.608561] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 663.609591] env[61629]: DEBUG nova.compute.utils [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 663.611684] env[61629]: DEBUG oslo_concurrency.lockutils [req-9fc04c98-f416-45e8-b90a-1d7bc5aeee40 req-7e84d495-5f69-4948-9f93-6d7491bb1c7e service nova] Acquired lock "refresh_cache-67534b42-bfab-49a0-922d-8a79a13995db" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.611959] env[61629]: DEBUG nova.network.neutron [req-9fc04c98-f416-45e8-b90a-1d7bc5aeee40 req-7e84d495-5f69-4948-9f93-6d7491bb1c7e service nova] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Refreshing network info cache for port ba3af36d-c7aa-42c6-8578-0a1e6010675e {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 663.615027] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-86da66dc-79ff-420b-84e5-64cfebc1cdaf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.616021] env[61629]: DEBUG nova.compute.manager [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 663.616212] env[61629]: DEBUG nova.network.neutron [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 663.627961] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f252949-dced-4855-8872-015f412f4d5c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.657157] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 67534b42-bfab-49a0-922d-8a79a13995db could not be found. 
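The WARNING above shows the vmwareapi destroy path tolerating a VM that is already gone from the backend: the driver logs InstanceNotFound, still reports "Instance destroyed", and lets network deallocation proceed. Below is a minimal sketch of that control flow, not Nova's actual implementation; the delete_backend_vm callable is a hypothetical stand-in for the vCenter calls made by nova.virt.vmwareapi.vmops.destroy.

    # Minimal sketch (assumptions noted above) of the tolerant-destroy pattern.
    import logging

    LOG = logging.getLogger(__name__)

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy(instance_uuid, delete_backend_vm):
        try:
            delete_backend_vm(instance_uuid)
        except InstanceNotFound:
            # The VM is already gone from vCenter; treat it as destroyed so
            # network and resource cleanup still run.
            LOG.warning("Instance does not exist on backend: %s", instance_uuid)
        LOG.debug("Instance destroyed")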
[ 663.657399] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 663.657612] env[61629]: INFO nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Took 0.05 seconds to destroy the instance on the hypervisor. [ 663.657835] env[61629]: DEBUG oslo.service.loopingcall [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 663.660404] env[61629]: DEBUG nova.compute.manager [-] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 663.660502] env[61629]: DEBUG nova.network.neutron [-] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 663.670761] env[61629]: DEBUG nova.policy [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3cbfdc70fad64e8ab37fb9e0c1a10e0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bc538b7901b4d65a6107db047063183', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 663.678038] env[61629]: DEBUG nova.network.neutron [-] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.979155] env[61629]: DEBUG nova.network.neutron [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Successfully created port: b213872d-112d-4e10-92df-607c1c61e42c {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 664.029555] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79db82c2-8c5f-4323-a26b-4d19c9b041e1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.038105] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c632df39-dfab-4f1b-a360-5227e09ff63c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.071968] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3084ef57-28bd-41d2-90b3-5708be26cf76 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.079924] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4f00d8-a112-4067-abb0-861d097e2e80 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.094308] env[61629]: DEBUG nova.compute.provider_tree [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 664.107042] env[61629]: DEBUG oslo_vmware.api [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354048, 'name': PowerOnVM_Task, 'duration_secs': 0.461298} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.107042] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 664.107042] env[61629]: DEBUG nova.compute.manager [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 664.107042] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48af1df6-8502-41ca-8bb6-d95fcf64251c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.119288] env[61629]: DEBUG nova.compute.manager [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 664.131748] env[61629]: DEBUG nova.network.neutron [req-9fc04c98-f416-45e8-b90a-1d7bc5aeee40 req-7e84d495-5f69-4948-9f93-6d7491bb1c7e service nova] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 664.181077] env[61629]: DEBUG nova.network.neutron [-] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.233794] env[61629]: DEBUG nova.network.neutron [req-9fc04c98-f416-45e8-b90a-1d7bc5aeee40 req-7e84d495-5f69-4948-9f93-6d7491bb1c7e service nova] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.600506] env[61629]: DEBUG nova.scheduler.client.report [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 664.630169] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.684425] env[61629]: INFO nova.compute.manager [-] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Took 1.02 seconds to deallocate network for instance. 
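The recurring "Acquiring lock ... / Lock ... acquired ... :: waited Ns / Lock ... "released" ... :: held Ns" triples in these entries come from oslo.concurrency's synchronized wrapper (the inner function at lockutils.py:402/407/421 referenced in the messages). A small sketch of how such a critical section is declared with the stock oslo.concurrency decorator follows; the function name and body are illustrative, not Nova's resource tracker itself.

    # Sketch of the lock pattern behind the acquiring/acquired/released messages.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage_example(tracker, instance):
        # Runs under the "compute_resources" semaphore; the wrapper logs how
        # long the caller waited for the lock and how long it was held.
        tracker.update_usage(instance)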
[ 664.689513] env[61629]: DEBUG nova.compute.claims [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 664.689911] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.737897] env[61629]: DEBUG oslo_concurrency.lockutils [req-9fc04c98-f416-45e8-b90a-1d7bc5aeee40 req-7e84d495-5f69-4948-9f93-6d7491bb1c7e service nova] Releasing lock "refresh_cache-67534b42-bfab-49a0-922d-8a79a13995db" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.738202] env[61629]: DEBUG nova.compute.manager [req-9fc04c98-f416-45e8-b90a-1d7bc5aeee40 req-7e84d495-5f69-4948-9f93-6d7491bb1c7e service nova] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Received event network-vif-deleted-ba3af36d-c7aa-42c6-8578-0a1e6010675e {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 664.816716] env[61629]: DEBUG nova.compute.manager [req-6c7fcc39-4c7c-48da-bb84-73a19f160021 req-016b219e-7551-4cb0-bf51-e7b6c7996a82 service nova] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Received event network-changed-b213872d-112d-4e10-92df-607c1c61e42c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 664.816974] env[61629]: DEBUG nova.compute.manager [req-6c7fcc39-4c7c-48da-bb84-73a19f160021 req-016b219e-7551-4cb0-bf51-e7b6c7996a82 service nova] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Refreshing instance network info cache due to event network-changed-b213872d-112d-4e10-92df-607c1c61e42c. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 664.817204] env[61629]: DEBUG oslo_concurrency.lockutils [req-6c7fcc39-4c7c-48da-bb84-73a19f160021 req-016b219e-7551-4cb0-bf51-e7b6c7996a82 service nova] Acquiring lock "refresh_cache-cbcb5b42-06ab-41e4-ad08-d285b0863bfb" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.817346] env[61629]: DEBUG oslo_concurrency.lockutils [req-6c7fcc39-4c7c-48da-bb84-73a19f160021 req-016b219e-7551-4cb0-bf51-e7b6c7996a82 service nova] Acquired lock "refresh_cache-cbcb5b42-06ab-41e4-ad08-d285b0863bfb" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.817570] env[61629]: DEBUG nova.network.neutron [req-6c7fcc39-4c7c-48da-bb84-73a19f160021 req-016b219e-7551-4cb0-bf51-e7b6c7996a82 service nova] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Refreshing network info cache for port b213872d-112d-4e10-92df-607c1c61e42c {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 665.001243] env[61629]: ERROR nova.compute.manager [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b213872d-112d-4e10-92df-607c1c61e42c, please check neutron logs for more information. [ 665.001243] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 665.001243] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 665.001243] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 665.001243] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 665.001243] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 665.001243] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 665.001243] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 665.001243] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 665.001243] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 665.001243] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 665.001243] env[61629]: ERROR nova.compute.manager raise self.value [ 665.001243] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 665.001243] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 665.001243] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 665.001243] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 665.002264] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 665.002264] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 665.002264] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for 
port b213872d-112d-4e10-92df-607c1c61e42c, please check neutron logs for more information. [ 665.002264] env[61629]: ERROR nova.compute.manager [ 665.002264] env[61629]: Traceback (most recent call last): [ 665.002264] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 665.002264] env[61629]: listener.cb(fileno) [ 665.002264] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 665.002264] env[61629]: result = function(*args, **kwargs) [ 665.002264] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 665.002264] env[61629]: return func(*args, **kwargs) [ 665.002264] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 665.002264] env[61629]: raise e [ 665.002264] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 665.002264] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 665.002264] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 665.002264] env[61629]: created_port_ids = self._update_ports_for_instance( [ 665.002264] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 665.002264] env[61629]: with excutils.save_and_reraise_exception(): [ 665.002264] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 665.002264] env[61629]: self.force_reraise() [ 665.002264] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 665.002264] env[61629]: raise self.value [ 665.002264] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 665.002264] env[61629]: updated_port = self._update_port( [ 665.002264] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 665.002264] env[61629]: _ensure_no_port_binding_failure(port) [ 665.002264] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 665.002264] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 665.003650] env[61629]: nova.exception.PortBindingFailed: Binding failed for port b213872d-112d-4e10-92df-607c1c61e42c, please check neutron logs for more information. 
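The traceback above ends in Nova's port-binding sanity check: after Neutron returns the updated port, _ensure_no_port_binding_failure raises PortBindingFailed when the binding is reported as failed, which is what aborts this boot. A condensed sketch of that check is below; it assumes the conventional Neutron binding attribute ('binding:vif_type' set to 'binding_failed') and uses a local exception class standing in for nova.exception.PortBindingFailed.

    # Condensed sketch of the check that raised the error above.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron marks a failed binding by setting the port's vif_type.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])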
[ 665.003650] env[61629]: Removing descriptor: 15 [ 665.076737] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquiring lock "2315bd37-6151-42d7-8b54-9ee367be0ed1" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.077019] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lock "2315bd37-6151-42d7-8b54-9ee367be0ed1" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.077245] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquiring lock "2315bd37-6151-42d7-8b54-9ee367be0ed1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.077434] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lock "2315bd37-6151-42d7-8b54-9ee367be0ed1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.077710] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lock "2315bd37-6151-42d7-8b54-9ee367be0ed1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 665.081040] env[61629]: INFO nova.compute.manager [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Terminating instance [ 665.083840] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquiring lock "refresh_cache-2315bd37-6151-42d7-8b54-9ee367be0ed1" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.084011] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquired lock "refresh_cache-2315bd37-6151-42d7-8b54-9ee367be0ed1" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.084201] env[61629]: DEBUG nova.network.neutron [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Building 
network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 665.108039] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.007s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 665.108778] env[61629]: ERROR nova.compute.manager [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 683a0eb5-ccf5-4e74-9beb-82cbf25490b5, please check neutron logs for more information. [ 665.108778] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Traceback (most recent call last): [ 665.108778] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 665.108778] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] self.driver.spawn(context, instance, image_meta, [ 665.108778] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 665.108778] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 665.108778] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 665.108778] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] vm_ref = self.build_virtual_machine(instance, [ 665.108778] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 665.108778] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] vif_infos = vmwarevif.get_vif_info(self._session, [ 665.108778] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 665.109139] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] for vif in network_info: [ 665.109139] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 665.109139] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] return self._sync_wrapper(fn, *args, **kwargs) [ 665.109139] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 665.109139] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] self.wait() [ 665.109139] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 665.109139] 
env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] self[:] = self._gt.wait() [ 665.109139] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 665.109139] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] return self._exit_event.wait() [ 665.109139] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 665.109139] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] result = hub.switch() [ 665.109139] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 665.109139] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] return self.greenlet.switch() [ 665.109434] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 665.109434] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] result = function(*args, **kwargs) [ 665.109434] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 665.109434] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] return func(*args, **kwargs) [ 665.109434] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 665.109434] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] raise e [ 665.109434] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 665.109434] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] nwinfo = self.network_api.allocate_for_instance( [ 665.109434] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 665.109434] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] created_port_ids = self._update_ports_for_instance( [ 665.109434] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 665.109434] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] with excutils.save_and_reraise_exception(): [ 665.109434] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 665.109731] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] self.force_reraise() [ 665.109731] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 665.109731] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] raise self.value [ 665.109731] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 665.109731] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] updated_port = self._update_port( [ 665.109731] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 665.109731] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] _ensure_no_port_binding_failure(port) [ 665.109731] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 665.109731] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] raise exception.PortBindingFailed(port_id=port['id']) [ 665.109731] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] nova.exception.PortBindingFailed: Binding failed for port 683a0eb5-ccf5-4e74-9beb-82cbf25490b5, please check neutron logs for more information. [ 665.109731] env[61629]: ERROR nova.compute.manager [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] [ 665.110366] env[61629]: DEBUG nova.compute.utils [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Binding failed for port 683a0eb5-ccf5-4e74-9beb-82cbf25490b5, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 665.110560] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.938s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.113901] env[61629]: DEBUG nova.compute.manager [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Build of instance d43d47a2-a27b-4bb8-9421-61805064a3d2 was re-scheduled: Binding failed for port 683a0eb5-ccf5-4e74-9beb-82cbf25490b5, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 665.114343] env[61629]: DEBUG nova.compute.manager [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 665.114560] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Acquiring lock "refresh_cache-d43d47a2-a27b-4bb8-9421-61805064a3d2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.114704] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Acquired lock "refresh_cache-d43d47a2-a27b-4bb8-9421-61805064a3d2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.114864] env[61629]: DEBUG nova.network.neutron [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 665.128763] env[61629]: DEBUG nova.compute.manager [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 665.158148] env[61629]: DEBUG nova.virt.hardware [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 665.158409] env[61629]: DEBUG nova.virt.hardware [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 665.158565] env[61629]: DEBUG nova.virt.hardware [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 665.158740] env[61629]: DEBUG nova.virt.hardware [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 665.158881] env[61629]: DEBUG nova.virt.hardware [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 665.159034] env[61629]: DEBUG nova.virt.hardware [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 665.159249] env[61629]: DEBUG nova.virt.hardware [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 665.159987] env[61629]: DEBUG nova.virt.hardware [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 665.159987] env[61629]: DEBUG nova.virt.hardware [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Got 1 possible 
topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 665.159987] env[61629]: DEBUG nova.virt.hardware [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 665.159987] env[61629]: DEBUG nova.virt.hardware [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 665.160977] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-308b4f65-0b72-44ef-9aed-2b3f4f35ae11 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.170794] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcbaa545-5318-4eec-8364-05cfb357fbf1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.185931] env[61629]: ERROR nova.compute.manager [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b213872d-112d-4e10-92df-607c1c61e42c, please check neutron logs for more information. [ 665.185931] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Traceback (most recent call last): [ 665.185931] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 665.185931] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] yield resources [ 665.185931] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 665.185931] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] self.driver.spawn(context, instance, image_meta, [ 665.185931] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 665.185931] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 665.185931] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 665.185931] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] vm_ref = self.build_virtual_machine(instance, [ 665.185931] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 665.186240] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] vif_infos = vmwarevif.get_vif_info(self._session, [ 665.186240] env[61629]: ERROR nova.compute.manager 
[instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 665.186240] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] for vif in network_info: [ 665.186240] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 665.186240] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] return self._sync_wrapper(fn, *args, **kwargs) [ 665.186240] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 665.186240] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] self.wait() [ 665.186240] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 665.186240] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] self[:] = self._gt.wait() [ 665.186240] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 665.186240] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] return self._exit_event.wait() [ 665.186240] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 665.186240] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] current.throw(*self._exc) [ 665.186548] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 665.186548] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] result = function(*args, **kwargs) [ 665.186548] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 665.186548] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] return func(*args, **kwargs) [ 665.186548] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 665.186548] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] raise e [ 665.186548] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 665.186548] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] nwinfo = self.network_api.allocate_for_instance( [ 665.186548] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 665.186548] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] created_port_ids = self._update_ports_for_instance( [ 665.186548] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File 
"/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 665.186548] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] with excutils.save_and_reraise_exception(): [ 665.186548] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 665.186855] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] self.force_reraise() [ 665.186855] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 665.186855] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] raise self.value [ 665.186855] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 665.186855] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] updated_port = self._update_port( [ 665.186855] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 665.186855] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] _ensure_no_port_binding_failure(port) [ 665.186855] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 665.186855] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] raise exception.PortBindingFailed(port_id=port['id']) [ 665.186855] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] nova.exception.PortBindingFailed: Binding failed for port b213872d-112d-4e10-92df-607c1c61e42c, please check neutron logs for more information. [ 665.186855] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] [ 665.186855] env[61629]: INFO nova.compute.manager [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Terminating instance [ 665.188124] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "refresh_cache-cbcb5b42-06ab-41e4-ad08-d285b0863bfb" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.342349] env[61629]: DEBUG nova.network.neutron [req-6c7fcc39-4c7c-48da-bb84-73a19f160021 req-016b219e-7551-4cb0-bf51-e7b6c7996a82 service nova] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 665.427688] env[61629]: DEBUG nova.network.neutron [req-6c7fcc39-4c7c-48da-bb84-73a19f160021 req-016b219e-7551-4cb0-bf51-e7b6c7996a82 service nova] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.611701] env[61629]: DEBUG nova.network.neutron [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 665.635850] env[61629]: DEBUG nova.network.neutron [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 665.658487] env[61629]: DEBUG nova.network.neutron [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.717088] env[61629]: DEBUG nova.network.neutron [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.930883] env[61629]: DEBUG oslo_concurrency.lockutils [req-6c7fcc39-4c7c-48da-bb84-73a19f160021 req-016b219e-7551-4cb0-bf51-e7b6c7996a82 service nova] Releasing lock "refresh_cache-cbcb5b42-06ab-41e4-ad08-d285b0863bfb" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.931465] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquired lock "refresh_cache-cbcb5b42-06ab-41e4-ad08-d285b0863bfb" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.931724] env[61629]: DEBUG nova.network.neutron [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 665.997288] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-887614a1-db88-4ba7-a15a-3b8efb297bf7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.005149] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e0a5d3-4106-424a-8210-c265ca6339a0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.034392] env[61629]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b06b568e-a09c-4086-a7a2-7deba69dad74 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.041183] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a639778-2e84-41bd-942c-8b66ea00fbeb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.055129] env[61629]: DEBUG nova.compute.provider_tree [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 666.161525] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Releasing lock "refresh_cache-2315bd37-6151-42d7-8b54-9ee367be0ed1" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 666.162125] env[61629]: DEBUG nova.compute.manager [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 666.162389] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 666.163321] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f78b63-40ad-42dd-a1fc-dbbee7ed79a2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.171664] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 666.173029] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef2f7703-fdc9-49d4-9050-00491b67f505 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.178630] env[61629]: DEBUG oslo_vmware.api [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 666.178630] env[61629]: value = "task-1354050" [ 666.178630] env[61629]: _type = "Task" [ 666.178630] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.186390] env[61629]: DEBUG oslo_vmware.api [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354050, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.220568] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Releasing lock "refresh_cache-d43d47a2-a27b-4bb8-9421-61805064a3d2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 666.220844] env[61629]: DEBUG nova.compute.manager [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 666.220988] env[61629]: DEBUG nova.compute.manager [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 666.221165] env[61629]: DEBUG nova.network.neutron [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 666.236998] env[61629]: DEBUG nova.network.neutron [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 666.450580] env[61629]: DEBUG nova.network.neutron [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 666.520292] env[61629]: DEBUG nova.network.neutron [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.558709] env[61629]: DEBUG nova.scheduler.client.report [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 666.698463] env[61629]: DEBUG oslo_vmware.api [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354050, 'name': PowerOffVM_Task, 'duration_secs': 0.190129} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.698771] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 666.698888] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 666.699150] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a78ceba8-27a8-4dff-a0d9-9ef29735db01 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.727596] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 666.727799] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 666.727980] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Deleting the datastore file [datastore2] 
2315bd37-6151-42d7-8b54-9ee367be0ed1 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 666.728275] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c39869d-0cd4-493d-aa93-b3628fc7313e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.735982] env[61629]: DEBUG oslo_vmware.api [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 666.735982] env[61629]: value = "task-1354052" [ 666.735982] env[61629]: _type = "Task" [ 666.735982] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.739995] env[61629]: DEBUG nova.network.neutron [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.743616] env[61629]: DEBUG oslo_vmware.api [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354052, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.861013] env[61629]: DEBUG nova.compute.manager [req-aab90461-2acc-434e-8765-e669cf3def5b req-53b2bc0b-8315-4ef6-8066-919f6b7d095c service nova] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Received event network-vif-deleted-b213872d-112d-4e10-92df-607c1c61e42c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 667.022405] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Releasing lock "refresh_cache-cbcb5b42-06ab-41e4-ad08-d285b0863bfb" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 667.022801] env[61629]: DEBUG nova.compute.manager [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 667.023033] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 667.023334] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-496a226d-c9a2-4001-b37b-8381c900b5f9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.032856] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3da5211-3bc8-483b-984e-1032ec6c13f6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.055923] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cbcb5b42-06ab-41e4-ad08-d285b0863bfb could not be found. [ 667.056135] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 667.056316] env[61629]: INFO nova.compute.manager [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Took 0.03 seconds to destroy the instance on the hypervisor. [ 667.056549] env[61629]: DEBUG oslo.service.loopingcall [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 667.056760] env[61629]: DEBUG nova.compute.manager [-] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 667.056849] env[61629]: DEBUG nova.network.neutron [-] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 667.063215] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.953s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 667.063764] env[61629]: ERROR nova.compute.manager [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5881b127-84c0-467c-99b3-a3b33be5a839, please check neutron logs for more information. [ 667.063764] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Traceback (most recent call last): [ 667.063764] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 667.063764] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] self.driver.spawn(context, instance, image_meta, [ 667.063764] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 667.063764] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 667.063764] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 667.063764] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] vm_ref = self.build_virtual_machine(instance, [ 667.063764] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 667.063764] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] vif_infos = vmwarevif.get_vif_info(self._session, [ 667.063764] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 667.064091] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] for vif in network_info: [ 667.064091] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 667.064091] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] return self._sync_wrapper(fn, *args, **kwargs) [ 667.064091] env[61629]: ERROR nova.compute.manager 
[instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 667.064091] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] self.wait() [ 667.064091] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 667.064091] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] self[:] = self._gt.wait() [ 667.064091] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 667.064091] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] return self._exit_event.wait() [ 667.064091] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 667.064091] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] result = hub.switch() [ 667.064091] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 667.064091] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] return self.greenlet.switch() [ 667.064468] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 667.064468] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] result = function(*args, **kwargs) [ 667.064468] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 667.064468] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] return func(*args, **kwargs) [ 667.064468] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 667.064468] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] raise e [ 667.064468] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 667.064468] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] nwinfo = self.network_api.allocate_for_instance( [ 667.064468] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 667.064468] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] created_port_ids = self._update_ports_for_instance( [ 667.064468] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 667.064468] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] with excutils.save_and_reraise_exception(): [ 667.064468] env[61629]: ERROR nova.compute.manager [instance: 
4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 667.064812] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] self.force_reraise() [ 667.064812] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 667.064812] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] raise self.value [ 667.064812] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 667.064812] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] updated_port = self._update_port( [ 667.064812] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 667.064812] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] _ensure_no_port_binding_failure(port) [ 667.064812] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 667.064812] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] raise exception.PortBindingFailed(port_id=port['id']) [ 667.064812] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] nova.exception.PortBindingFailed: Binding failed for port 5881b127-84c0-467c-99b3-a3b33be5a839, please check neutron logs for more information. [ 667.064812] env[61629]: ERROR nova.compute.manager [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] [ 667.065138] env[61629]: DEBUG nova.compute.utils [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Binding failed for port 5881b127-84c0-467c-99b3-a3b33be5a839, please check neutron logs for more information. 
{{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 667.065572] env[61629]: DEBUG oslo_concurrency.lockutils [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.608s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 667.066969] env[61629]: INFO nova.compute.claims [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 667.069490] env[61629]: DEBUG nova.compute.manager [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Build of instance 4839c06e-f55a-4162-8eae-cfaeae07cdae was re-scheduled: Binding failed for port 5881b127-84c0-467c-99b3-a3b33be5a839, please check neutron logs for more information. {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 667.069955] env[61629]: DEBUG nova.compute.manager [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 667.070196] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Acquiring lock "refresh_cache-4839c06e-f55a-4162-8eae-cfaeae07cdae" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 667.070341] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Acquired lock "refresh_cache-4839c06e-f55a-4162-8eae-cfaeae07cdae" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.070498] env[61629]: DEBUG nova.network.neutron [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 667.071764] env[61629]: DEBUG nova.network.neutron [-] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 667.247690] env[61629]: INFO nova.compute.manager [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: d43d47a2-a27b-4bb8-9421-61805064a3d2] Took 1.03 seconds to deallocate network for instance. 
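The PortBindingFailed traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294), which raises as soon as Neutron reports a failed binding for the updated port. A minimal sketch of that check, reconstructed from the traceback rather than copied from the Nova tree (the exception class below is a stand-in, and the 'binding_failed' vif_type value is an assumption), looks roughly like this:

# Sketch reconstructed from the traceback above, not the actual Nova source.
# Neutron is assumed to mark a port whose binding failed with
# binding:vif_type = 'binding_failed'.

class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)

def _ensure_no_port_binding_failure(port):
    # 'port' is the dict Neutron returns for the updated port.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

In this run the check fired for port 5881b127-84c0-467c-99b3-a3b33be5a839, which is why instance 4839c06e-f55a-4162-8eae-cfaeae07cdae is re-scheduled above instead of being spawned.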
[ 667.250413] env[61629]: DEBUG oslo_vmware.api [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354052, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.115938} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.250466] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 667.250622] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 667.250790] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 667.250957] env[61629]: INFO nova.compute.manager [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Took 1.09 seconds to destroy the instance on the hypervisor. [ 667.251219] env[61629]: DEBUG oslo.service.loopingcall [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 667.251686] env[61629]: DEBUG nova.compute.manager [-] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 667.251686] env[61629]: DEBUG nova.network.neutron [-] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 667.271477] env[61629]: DEBUG nova.network.neutron [-] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 667.578222] env[61629]: DEBUG nova.network.neutron [-] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.593101] env[61629]: DEBUG nova.network.neutron [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 667.675707] env[61629]: DEBUG nova.network.neutron [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.777190] env[61629]: DEBUG nova.network.neutron [-] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.083279] env[61629]: INFO nova.compute.manager [-] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Took 1.03 seconds to deallocate network for instance. [ 668.085748] env[61629]: DEBUG nova.compute.claims [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 668.085748] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.177653] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Releasing lock "refresh_cache-4839c06e-f55a-4162-8eae-cfaeae07cdae" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 668.177881] env[61629]: DEBUG nova.compute.manager [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 668.178067] env[61629]: DEBUG nova.compute.manager [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 668.178231] env[61629]: DEBUG nova.network.neutron [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 668.193027] env[61629]: DEBUG nova.network.neutron [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 668.277632] env[61629]: INFO nova.scheduler.client.report [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Deleted allocations for instance d43d47a2-a27b-4bb8-9421-61805064a3d2 [ 668.283619] env[61629]: INFO nova.compute.manager [-] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Took 1.03 seconds to deallocate network for instance. [ 668.460468] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d275b454-ee03-4ce8-b202-4271e037e712 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.468946] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf5d748-8c75-4113-a23e-60bd2b7321d4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.500444] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f826c7fa-4e0a-4156-baa8-a4f5709ca244 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.508770] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-627135b9-50b6-4e47-aac4-adb2400c9e00 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.522520] env[61629]: DEBUG nova.compute.provider_tree [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 668.697913] env[61629]: DEBUG nova.network.neutron [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.793566] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.793996] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c7a172bc-df1d-493a-ae59-e3ba7ad2faa5 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Lock "d43d47a2-a27b-4bb8-9421-61805064a3d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.142s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.025829] env[61629]: DEBUG nova.scheduler.client.report [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Inventory has not changed for provider 
d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 669.204803] env[61629]: INFO nova.compute.manager [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] [instance: 4839c06e-f55a-4162-8eae-cfaeae07cdae] Took 1.03 seconds to deallocate network for instance. [ 669.281322] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Acquiring lock "fe6adbf6-be78-45ee-a136-b7e538fb124b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.281551] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Lock "fe6adbf6-be78-45ee-a136-b7e538fb124b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.296771] env[61629]: DEBUG nova.compute.manager [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 669.531771] env[61629]: DEBUG oslo_concurrency.lockutils [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.466s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.532399] env[61629]: DEBUG nova.compute.manager [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 669.535478] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.762s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.818463] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.043611] env[61629]: DEBUG nova.compute.utils [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 670.046526] env[61629]: DEBUG nova.compute.manager [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 670.046640] env[61629]: DEBUG nova.network.neutron [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 670.107323] env[61629]: DEBUG nova.policy [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '72ebfc847617475fac00022cd850c291', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a373f8ea0a743aa9f7d36568973e9ae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 670.232551] env[61629]: INFO nova.scheduler.client.report [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Deleted allocations for instance 4839c06e-f55a-4162-8eae-cfaeae07cdae [ 670.435137] env[61629]: DEBUG nova.network.neutron [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Successfully created port: 6553eeaf-691c-4f78-b738-44600d6fd47f {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 670.496648] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-95ebeb77-dda5-4df7-b644-7f70c3ded1e3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.505297] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-355b8f36-3878-43be-8731-08f7f6852304 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.536540] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52797753-39c5-4ae6-a84c-a3af3a1ba4aa {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.544906] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cfa86bb-a1ab-44fa-a3f8-21dcafbeacd5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.549108] env[61629]: DEBUG nova.compute.manager [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 670.561868] env[61629]: DEBUG nova.compute.provider_tree [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 670.745492] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a645c6bf-9e5a-4b8b-8469-da8e3a723a45 tempest-ServersAdminTestJSON-1999547687 tempest-ServersAdminTestJSON-1999547687-project-member] Lock "4839c06e-f55a-4162-8eae-cfaeae07cdae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.723s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 671.067019] env[61629]: DEBUG nova.scheduler.client.report [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 671.250448] env[61629]: DEBUG nova.compute.manager [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Starting instance... 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 671.341438] env[61629]: DEBUG nova.compute.manager [req-bcf0eedc-2180-4181-a1dc-838f89ee7f73 req-c4a33e1f-2599-402c-a8f9-0cfec2a15325 service nova] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Received event network-changed-6553eeaf-691c-4f78-b738-44600d6fd47f {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 671.341621] env[61629]: DEBUG nova.compute.manager [req-bcf0eedc-2180-4181-a1dc-838f89ee7f73 req-c4a33e1f-2599-402c-a8f9-0cfec2a15325 service nova] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Refreshing instance network info cache due to event network-changed-6553eeaf-691c-4f78-b738-44600d6fd47f. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 671.341829] env[61629]: DEBUG oslo_concurrency.lockutils [req-bcf0eedc-2180-4181-a1dc-838f89ee7f73 req-c4a33e1f-2599-402c-a8f9-0cfec2a15325 service nova] Acquiring lock "refresh_cache-842633ee-19a5-44d6-bdef-c9f81e5af11e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 671.342079] env[61629]: DEBUG oslo_concurrency.lockutils [req-bcf0eedc-2180-4181-a1dc-838f89ee7f73 req-c4a33e1f-2599-402c-a8f9-0cfec2a15325 service nova] Acquired lock "refresh_cache-842633ee-19a5-44d6-bdef-c9f81e5af11e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.342226] env[61629]: DEBUG nova.network.neutron [req-bcf0eedc-2180-4181-a1dc-838f89ee7f73 req-c4a33e1f-2599-402c-a8f9-0cfec2a15325 service nova] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Refreshing network info cache for port 6553eeaf-691c-4f78-b738-44600d6fd47f {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 671.448172] env[61629]: ERROR nova.compute.manager [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6553eeaf-691c-4f78-b738-44600d6fd47f, please check neutron logs for more information. 
[ 671.448172] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 671.448172] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 671.448172] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 671.448172] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 671.448172] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 671.448172] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 671.448172] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 671.448172] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 671.448172] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 671.448172] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 671.448172] env[61629]: ERROR nova.compute.manager raise self.value [ 671.448172] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 671.448172] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 671.448172] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 671.448172] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 671.448616] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 671.448616] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 671.448616] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6553eeaf-691c-4f78-b738-44600d6fd47f, please check neutron logs for more information. 
[ 671.448616] env[61629]: ERROR nova.compute.manager [ 671.448616] env[61629]: Traceback (most recent call last): [ 671.448616] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 671.448616] env[61629]: listener.cb(fileno) [ 671.448616] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 671.448616] env[61629]: result = function(*args, **kwargs) [ 671.448616] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 671.448616] env[61629]: return func(*args, **kwargs) [ 671.448616] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 671.448616] env[61629]: raise e [ 671.448616] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 671.448616] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 671.448616] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 671.448616] env[61629]: created_port_ids = self._update_ports_for_instance( [ 671.448616] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 671.448616] env[61629]: with excutils.save_and_reraise_exception(): [ 671.448616] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 671.448616] env[61629]: self.force_reraise() [ 671.448616] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 671.448616] env[61629]: raise self.value [ 671.448616] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 671.448616] env[61629]: updated_port = self._update_port( [ 671.448616] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 671.448616] env[61629]: _ensure_no_port_binding_failure(port) [ 671.448616] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 671.448616] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 671.449334] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 6553eeaf-691c-4f78-b738-44600d6fd47f, please check neutron logs for more information. [ 671.449334] env[61629]: Removing descriptor: 21 [ 671.560159] env[61629]: DEBUG nova.compute.manager [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 671.569195] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.034s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 671.569979] env[61629]: ERROR nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1b89f1d7-2c1f-40fa-a812-8ab8ced966d1, please check neutron logs for more information. [ 671.569979] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Traceback (most recent call last): [ 671.569979] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 671.569979] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] self.driver.spawn(context, instance, image_meta, [ 671.569979] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 671.569979] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 671.569979] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 671.569979] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] vm_ref = self.build_virtual_machine(instance, [ 671.569979] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 671.569979] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] vif_infos = vmwarevif.get_vif_info(self._session, [ 671.569979] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 671.570401] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] for vif in network_info: [ 671.570401] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 671.570401] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] return self._sync_wrapper(fn, *args, **kwargs) [ 671.570401] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 671.570401] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] self.wait() [ 671.570401] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 671.570401] env[61629]: ERROR 
nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] self[:] = self._gt.wait() [ 671.570401] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 671.570401] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] return self._exit_event.wait() [ 671.570401] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 671.570401] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] result = hub.switch() [ 671.570401] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 671.570401] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] return self.greenlet.switch() [ 671.570772] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 671.570772] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] result = function(*args, **kwargs) [ 671.570772] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 671.570772] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] return func(*args, **kwargs) [ 671.570772] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 671.570772] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] raise e [ 671.570772] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 671.570772] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] nwinfo = self.network_api.allocate_for_instance( [ 671.570772] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 671.570772] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] created_port_ids = self._update_ports_for_instance( [ 671.570772] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 671.570772] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] with excutils.save_and_reraise_exception(): [ 671.570772] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 671.571232] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] self.force_reraise() [ 671.571232] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 671.571232] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] raise self.value [ 671.571232] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 671.571232] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] updated_port = self._update_port( [ 671.571232] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 671.571232] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] _ensure_no_port_binding_failure(port) [ 671.571232] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 671.571232] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] raise exception.PortBindingFailed(port_id=port['id']) [ 671.571232] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] nova.exception.PortBindingFailed: Binding failed for port 1b89f1d7-2c1f-40fa-a812-8ab8ced966d1, please check neutron logs for more information. [ 671.571232] env[61629]: ERROR nova.compute.manager [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] [ 671.571501] env[61629]: DEBUG nova.compute.utils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Binding failed for port 1b89f1d7-2c1f-40fa-a812-8ab8ced966d1, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 671.571921] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.647s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.574772] env[61629]: DEBUG nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Build of instance be2db738-cfe9-4720-b348-c7b03f28e96b was re-scheduled: Binding failed for port 1b89f1d7-2c1f-40fa-a812-8ab8ced966d1, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 671.575203] env[61629]: DEBUG nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 671.576028] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquiring lock "refresh_cache-be2db738-cfe9-4720-b348-c7b03f28e96b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 671.576028] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquired lock "refresh_cache-be2db738-cfe9-4720-b348-c7b03f28e96b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.576028] env[61629]: DEBUG nova.network.neutron [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 671.591719] env[61629]: DEBUG nova.virt.hardware [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 671.592029] env[61629]: DEBUG nova.virt.hardware [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 671.592199] env[61629]: DEBUG nova.virt.hardware [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 671.592382] env[61629]: DEBUG nova.virt.hardware [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Flavor pref 0:0:0 {{(pid=61629) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 671.592523] env[61629]: DEBUG nova.virt.hardware [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 671.592665] env[61629]: DEBUG nova.virt.hardware [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 671.592867] env[61629]: DEBUG nova.virt.hardware [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 671.593071] env[61629]: DEBUG nova.virt.hardware [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 671.593272] env[61629]: DEBUG nova.virt.hardware [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 671.593455] env[61629]: DEBUG nova.virt.hardware [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 671.593631] env[61629]: DEBUG nova.virt.hardware [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 671.594690] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91368ef1-8f44-42ae-91bd-2683f98139ef {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.603814] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cab6e75-a770-4db4-a74e-ebebc252934e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.619531] env[61629]: ERROR nova.compute.manager [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6553eeaf-691c-4f78-b738-44600d6fd47f, please check neutron logs for more information. 
[ 671.619531] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Traceback (most recent call last): [ 671.619531] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 671.619531] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] yield resources [ 671.619531] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 671.619531] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] self.driver.spawn(context, instance, image_meta, [ 671.619531] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 671.619531] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 671.619531] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 671.619531] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] vm_ref = self.build_virtual_machine(instance, [ 671.619531] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 671.619983] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] vif_infos = vmwarevif.get_vif_info(self._session, [ 671.619983] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 671.619983] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] for vif in network_info: [ 671.619983] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 671.619983] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] return self._sync_wrapper(fn, *args, **kwargs) [ 671.619983] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 671.619983] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] self.wait() [ 671.619983] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 671.619983] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] self[:] = self._gt.wait() [ 671.619983] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 671.619983] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] return self._exit_event.wait() [ 671.619983] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 671.619983] env[61629]: ERROR 
nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] current.throw(*self._exc) [ 671.620423] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 671.620423] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] result = function(*args, **kwargs) [ 671.620423] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 671.620423] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] return func(*args, **kwargs) [ 671.620423] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 671.620423] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] raise e [ 671.620423] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 671.620423] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] nwinfo = self.network_api.allocate_for_instance( [ 671.620423] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 671.620423] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] created_port_ids = self._update_ports_for_instance( [ 671.620423] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 671.620423] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] with excutils.save_and_reraise_exception(): [ 671.620423] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 671.621089] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] self.force_reraise() [ 671.621089] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 671.621089] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] raise self.value [ 671.621089] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 671.621089] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] updated_port = self._update_port( [ 671.621089] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 671.621089] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] _ensure_no_port_binding_failure(port) [ 671.621089] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
671.621089] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] raise exception.PortBindingFailed(port_id=port['id']) [ 671.621089] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] nova.exception.PortBindingFailed: Binding failed for port 6553eeaf-691c-4f78-b738-44600d6fd47f, please check neutron logs for more information. [ 671.621089] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] [ 671.621089] env[61629]: INFO nova.compute.manager [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Terminating instance [ 671.622227] env[61629]: DEBUG oslo_concurrency.lockutils [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Acquiring lock "refresh_cache-842633ee-19a5-44d6-bdef-c9f81e5af11e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 671.775120] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.864732] env[61629]: DEBUG nova.network.neutron [req-bcf0eedc-2180-4181-a1dc-838f89ee7f73 req-c4a33e1f-2599-402c-a8f9-0cfec2a15325 service nova] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 672.050373] env[61629]: DEBUG nova.network.neutron [req-bcf0eedc-2180-4181-a1dc-838f89ee7f73 req-c4a33e1f-2599-402c-a8f9-0cfec2a15325 service nova] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.097481] env[61629]: DEBUG nova.network.neutron [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 672.256454] env[61629]: DEBUG nova.network.neutron [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.476686] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f68032c9-19d9-42fe-af77-85f97f344d12 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.487129] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-732b00f3-7912-42e8-be03-5b3f7bcc0496 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.515113] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54935972-3748-446c-bb59-33213a05112d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.522771] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22049674-8850-4190-ac90-600b23f0ed21 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.536502] env[61629]: DEBUG nova.compute.provider_tree [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 672.552973] env[61629]: DEBUG oslo_concurrency.lockutils [req-bcf0eedc-2180-4181-a1dc-838f89ee7f73 req-c4a33e1f-2599-402c-a8f9-0cfec2a15325 service nova] Releasing lock "refresh_cache-842633ee-19a5-44d6-bdef-c9f81e5af11e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 672.553387] env[61629]: DEBUG oslo_concurrency.lockutils [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Acquired lock "refresh_cache-842633ee-19a5-44d6-bdef-c9f81e5af11e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.553573] env[61629]: DEBUG nova.network.neutron [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 672.759165] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Releasing lock "refresh_cache-be2db738-cfe9-4720-b348-c7b03f28e96b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 672.759437] env[61629]: DEBUG nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e 
tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 672.759622] env[61629]: DEBUG nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 672.760055] env[61629]: DEBUG nova.network.neutron [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 672.785306] env[61629]: DEBUG nova.network.neutron [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 673.039920] env[61629]: DEBUG nova.scheduler.client.report [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 673.080563] env[61629]: DEBUG nova.network.neutron [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 673.154907] env[61629]: DEBUG nova.network.neutron [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.289668] env[61629]: DEBUG nova.network.neutron [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.368016] env[61629]: DEBUG nova.compute.manager [req-20c34010-f33e-4f77-a1ae-69c8b9b57c43 req-5e7014da-ed55-411c-add6-ff51f32c8674 service nova] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Received event network-vif-deleted-6553eeaf-691c-4f78-b738-44600d6fd47f {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 673.547936] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.976s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.548628] env[61629]: ERROR nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ccaa17cb-d02f-4f92-bf54-7140be7e5cd8, please check neutron logs for more information. 
[ 673.548628] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Traceback (most recent call last): [ 673.548628] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 673.548628] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] self.driver.spawn(context, instance, image_meta, [ 673.548628] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 673.548628] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 673.548628] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 673.548628] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] vm_ref = self.build_virtual_machine(instance, [ 673.548628] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 673.548628] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] vif_infos = vmwarevif.get_vif_info(self._session, [ 673.548628] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 673.548905] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] for vif in network_info: [ 673.548905] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 673.548905] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] return self._sync_wrapper(fn, *args, **kwargs) [ 673.548905] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 673.548905] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] self.wait() [ 673.548905] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 673.548905] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] self[:] = self._gt.wait() [ 673.548905] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 673.548905] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] return self._exit_event.wait() [ 673.548905] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 673.548905] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] result = hub.switch() [ 673.548905] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
673.548905] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] return self.greenlet.switch() [ 673.549237] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 673.549237] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] result = function(*args, **kwargs) [ 673.549237] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 673.549237] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] return func(*args, **kwargs) [ 673.549237] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 673.549237] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] raise e [ 673.549237] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 673.549237] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] nwinfo = self.network_api.allocate_for_instance( [ 673.549237] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 673.549237] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] created_port_ids = self._update_ports_for_instance( [ 673.549237] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 673.549237] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] with excutils.save_and_reraise_exception(): [ 673.549237] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 673.549546] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] self.force_reraise() [ 673.549546] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 673.549546] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] raise self.value [ 673.549546] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 673.549546] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] updated_port = self._update_port( [ 673.549546] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 673.549546] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] _ensure_no_port_binding_failure(port) [ 673.549546] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 673.549546] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] raise exception.PortBindingFailed(port_id=port['id']) [ 673.549546] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] nova.exception.PortBindingFailed: Binding failed for port ccaa17cb-d02f-4f92-bf54-7140be7e5cd8, please check neutron logs for more information. [ 673.549546] env[61629]: ERROR nova.compute.manager [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] [ 673.549816] env[61629]: DEBUG nova.compute.utils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Binding failed for port ccaa17cb-d02f-4f92-bf54-7140be7e5cd8, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 673.550558] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.467s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.552461] env[61629]: INFO nova.compute.claims [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 673.555606] env[61629]: DEBUG nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Build of instance 6ba7ca7d-173d-41d3-b523-3548a67397c4 was re-scheduled: Binding failed for port ccaa17cb-d02f-4f92-bf54-7140be7e5cd8, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 673.556112] env[61629]: DEBUG nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 673.556402] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquiring lock "refresh_cache-6ba7ca7d-173d-41d3-b523-3548a67397c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 673.556607] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Acquired lock "refresh_cache-6ba7ca7d-173d-41d3-b523-3548a67397c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.556820] env[61629]: DEBUG nova.network.neutron [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 673.657306] env[61629]: DEBUG oslo_concurrency.lockutils [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Releasing lock "refresh_cache-842633ee-19a5-44d6-bdef-c9f81e5af11e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.657764] env[61629]: DEBUG nova.compute.manager [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 673.657960] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 673.658277] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-714ae1ba-cea9-477c-b79f-598943456e5a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.669430] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2405b1d6-9121-4b69-9632-74a650876b3f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.694679] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 842633ee-19a5-44d6-bdef-c9f81e5af11e could not be found. [ 673.694938] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 673.695152] env[61629]: INFO nova.compute.manager [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 673.695417] env[61629]: DEBUG oslo.service.loopingcall [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 673.695641] env[61629]: DEBUG nova.compute.manager [-] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 673.695750] env[61629]: DEBUG nova.network.neutron [-] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 673.711253] env[61629]: DEBUG nova.network.neutron [-] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 673.792434] env[61629]: INFO nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: be2db738-cfe9-4720-b348-c7b03f28e96b] Took 1.03 seconds to deallocate network for instance. 
[ 674.081151] env[61629]: DEBUG nova.network.neutron [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 674.176262] env[61629]: DEBUG nova.network.neutron [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.213490] env[61629]: DEBUG nova.network.neutron [-] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.687497] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Releasing lock "refresh_cache-6ba7ca7d-173d-41d3-b523-3548a67397c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.687639] env[61629]: DEBUG nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 674.690362] env[61629]: DEBUG nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 674.690362] env[61629]: DEBUG nova.network.neutron [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 674.712720] env[61629]: DEBUG nova.network.neutron [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 674.717508] env[61629]: INFO nova.compute.manager [-] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Took 1.02 seconds to deallocate network for instance. 
[ 674.721927] env[61629]: DEBUG nova.compute.claims [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 674.722170] env[61629]: DEBUG oslo_concurrency.lockutils [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.818690] env[61629]: INFO nova.scheduler.client.report [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Deleted allocations for instance be2db738-cfe9-4720-b348-c7b03f28e96b [ 675.008312] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-118e1eeb-1b78-4c71-b8b2-ce7934afaad4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.017116] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f99f8b32-8f03-46da-a54b-a2e9183d0bfc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.048050] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ab3372-e655-4482-8a71-aab3481bb53d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.056314] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc7deef-1363-4519-920c-b6efc3cca424 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.069703] env[61629]: DEBUG nova.compute.provider_tree [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 675.220686] env[61629]: DEBUG nova.network.neutron [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.329176] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "be2db738-cfe9-4720-b348-c7b03f28e96b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 105.697s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.573249] env[61629]: DEBUG nova.scheduler.client.report [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 
tempest-ServersTestJSON-1510503269-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 675.723141] env[61629]: INFO nova.compute.manager [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] [instance: 6ba7ca7d-173d-41d3-b523-3548a67397c4] Took 1.03 seconds to deallocate network for instance. [ 675.832527] env[61629]: DEBUG nova.compute.manager [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 676.078215] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.527s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.078764] env[61629]: DEBUG nova.compute.manager [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 676.081551] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.536s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.354178] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 676.586106] env[61629]: DEBUG nova.compute.utils [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 676.591448] env[61629]: DEBUG nova.compute.manager [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 676.591617] env[61629]: DEBUG nova.network.neutron [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 676.708749] env[61629]: DEBUG nova.policy [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4b88bedfe104f95910e4ff089cd84eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ae39158b8e574e62833e1b068ce8f5ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 676.792222] env[61629]: INFO nova.scheduler.client.report [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Deleted allocations for instance 6ba7ca7d-173d-41d3-b523-3548a67397c4 [ 677.082461] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89615d16-0a30-4eea-94c8-fd5f9314e135 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.090707] env[61629]: DEBUG nova.network.neutron [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Successfully created port: b80aeac8-416d-4020-9230-566a651c290a {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 677.093191] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff6e66f-e547-4244-b562-491cde3c0340 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.098055] env[61629]: DEBUG nova.compute.manager [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 677.132245] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65eb4959-8301-410b-88e5-f9e0f1786aaa {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.142545] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059c7eca-2f6d-4c60-b705-7ae1af5ce7bf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.159212] env[61629]: DEBUG nova.compute.provider_tree [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 677.307246] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed9928db-5698-42c9-822a-33345e4e329e tempest-MultipleCreateTestJSON-881524166 tempest-MultipleCreateTestJSON-881524166-project-member] Lock "6ba7ca7d-173d-41d3-b523-3548a67397c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 107.607s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 677.661228] env[61629]: DEBUG nova.scheduler.client.report [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 677.810685] env[61629]: DEBUG nova.compute.manager [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 678.108321] env[61629]: DEBUG nova.compute.manager [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 678.116672] env[61629]: ERROR nova.compute.manager [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b80aeac8-416d-4020-9230-566a651c290a, please check neutron logs for more information. 
[ 678.116672] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 678.116672] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 678.116672] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 678.116672] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 678.116672] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 678.116672] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 678.116672] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 678.116672] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 678.116672] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 678.116672] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 678.116672] env[61629]: ERROR nova.compute.manager raise self.value [ 678.116672] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 678.116672] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 678.116672] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 678.116672] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 678.117129] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 678.117129] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 678.117129] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b80aeac8-416d-4020-9230-566a651c290a, please check neutron logs for more information. 
[ 678.117129] env[61629]: ERROR nova.compute.manager [ 678.117129] env[61629]: Traceback (most recent call last): [ 678.117129] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 678.117129] env[61629]: listener.cb(fileno) [ 678.117129] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 678.117129] env[61629]: result = function(*args, **kwargs) [ 678.117129] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 678.117129] env[61629]: return func(*args, **kwargs) [ 678.117411] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 678.117411] env[61629]: raise e [ 678.117411] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 678.117411] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 678.117411] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 678.117411] env[61629]: created_port_ids = self._update_ports_for_instance( [ 678.117411] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 678.117411] env[61629]: with excutils.save_and_reraise_exception(): [ 678.117411] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 678.117411] env[61629]: self.force_reraise() [ 678.117411] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 678.117411] env[61629]: raise self.value [ 678.117411] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 678.117411] env[61629]: updated_port = self._update_port( [ 678.117411] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 678.117411] env[61629]: _ensure_no_port_binding_failure(port) [ 678.117411] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 678.117411] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 678.117411] env[61629]: nova.exception.PortBindingFailed: Binding failed for port b80aeac8-416d-4020-9230-566a651c290a, please check neutron logs for more information. 
[ 678.117411] env[61629]: Removing descriptor: 21 [ 678.141650] env[61629]: DEBUG nova.virt.hardware [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 678.142542] env[61629]: DEBUG nova.virt.hardware [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 678.143015] env[61629]: DEBUG nova.virt.hardware [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 678.143015] env[61629]: DEBUG nova.virt.hardware [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 678.143162] env[61629]: DEBUG nova.virt.hardware [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 678.143294] env[61629]: DEBUG nova.virt.hardware [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 678.143507] env[61629]: DEBUG nova.virt.hardware [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 678.143668] env[61629]: DEBUG nova.virt.hardware [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 678.143836] env[61629]: DEBUG nova.virt.hardware [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Got 1 possible 
topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 678.144007] env[61629]: DEBUG nova.virt.hardware [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 678.144199] env[61629]: DEBUG nova.virt.hardware [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 678.145095] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed19de1-981c-4597-bc3c-e38fb7c972b2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.151615] env[61629]: DEBUG nova.compute.manager [req-96432627-d9c4-4dbe-82f9-67d5605d3e7a req-4750075c-5453-488a-9595-5b0218b33a1c service nova] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Received event network-changed-b80aeac8-416d-4020-9230-566a651c290a {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 678.151780] env[61629]: DEBUG nova.compute.manager [req-96432627-d9c4-4dbe-82f9-67d5605d3e7a req-4750075c-5453-488a-9595-5b0218b33a1c service nova] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Refreshing instance network info cache due to event network-changed-b80aeac8-416d-4020-9230-566a651c290a. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 678.152014] env[61629]: DEBUG oslo_concurrency.lockutils [req-96432627-d9c4-4dbe-82f9-67d5605d3e7a req-4750075c-5453-488a-9595-5b0218b33a1c service nova] Acquiring lock "refresh_cache-c332c6fd-1edd-4d9e-85a9-32a408f9d05e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 678.152220] env[61629]: DEBUG oslo_concurrency.lockutils [req-96432627-d9c4-4dbe-82f9-67d5605d3e7a req-4750075c-5453-488a-9595-5b0218b33a1c service nova] Acquired lock "refresh_cache-c332c6fd-1edd-4d9e-85a9-32a408f9d05e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.152389] env[61629]: DEBUG nova.network.neutron [req-96432627-d9c4-4dbe-82f9-67d5605d3e7a req-4750075c-5453-488a-9595-5b0218b33a1c service nova] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Refreshing network info cache for port b80aeac8-416d-4020-9230-566a651c290a {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 678.157157] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25cc872-3af2-43f2-a65a-d65e2323b79f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.175333] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.093s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.176074] env[61629]: 
ERROR nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 15d38b73-42a2-4d0b-9550-84f7c88392ee, please check neutron logs for more information. [ 678.176074] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Traceback (most recent call last): [ 678.176074] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 678.176074] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] self.driver.spawn(context, instance, image_meta, [ 678.176074] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 678.176074] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 678.176074] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 678.176074] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] vm_ref = self.build_virtual_machine(instance, [ 678.176074] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 678.176074] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] vif_infos = vmwarevif.get_vif_info(self._session, [ 678.176074] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 678.176434] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] for vif in network_info: [ 678.176434] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 678.176434] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] return self._sync_wrapper(fn, *args, **kwargs) [ 678.176434] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 678.176434] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] self.wait() [ 678.176434] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 678.176434] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] self[:] = self._gt.wait() [ 678.176434] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 678.176434] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] return self._exit_event.wait() [ 678.176434] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 678.176434] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] current.throw(*self._exc) [ 678.176434] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 678.176434] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] result = function(*args, **kwargs) [ 678.176764] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 678.176764] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] return func(*args, **kwargs) [ 678.176764] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 678.176764] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] raise e [ 678.176764] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 678.176764] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] nwinfo = self.network_api.allocate_for_instance( [ 678.176764] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 678.176764] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] created_port_ids = self._update_ports_for_instance( [ 678.176764] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 678.176764] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] with excutils.save_and_reraise_exception(): [ 678.176764] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 678.176764] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] self.force_reraise() [ 678.176764] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 678.177119] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] raise self.value [ 678.177119] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 678.177119] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] updated_port = self._update_port( [ 678.177119] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 678.177119] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] _ensure_no_port_binding_failure(port) [ 678.177119] env[61629]: ERROR nova.compute.manager [instance: 
26366e41-de20-432b-a37e-5abb07c4ff8d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 678.177119] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] raise exception.PortBindingFailed(port_id=port['id']) [ 678.177119] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] nova.exception.PortBindingFailed: Binding failed for port 15d38b73-42a2-4d0b-9550-84f7c88392ee, please check neutron logs for more information. [ 678.177119] env[61629]: ERROR nova.compute.manager [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] [ 678.177119] env[61629]: DEBUG nova.compute.utils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Binding failed for port 15d38b73-42a2-4d0b-9550-84f7c88392ee, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 678.180411] env[61629]: ERROR nova.compute.manager [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b80aeac8-416d-4020-9230-566a651c290a, please check neutron logs for more information. [ 678.180411] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Traceback (most recent call last): [ 678.180411] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 678.180411] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] yield resources [ 678.180411] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 678.180411] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] self.driver.spawn(context, instance, image_meta, [ 678.180411] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 678.180411] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 678.180411] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 678.180411] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] vm_ref = self.build_virtual_machine(instance, [ 678.180411] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 678.180761] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] vif_infos = vmwarevif.get_vif_info(self._session, [ 678.180761] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 678.180761] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] for 
vif in network_info: [ 678.180761] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 678.180761] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] return self._sync_wrapper(fn, *args, **kwargs) [ 678.180761] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 678.180761] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] self.wait() [ 678.180761] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 678.180761] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] self[:] = self._gt.wait() [ 678.180761] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 678.180761] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] return self._exit_event.wait() [ 678.180761] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 678.180761] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] current.throw(*self._exc) [ 678.181115] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 678.181115] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] result = function(*args, **kwargs) [ 678.181115] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 678.181115] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] return func(*args, **kwargs) [ 678.181115] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 678.181115] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] raise e [ 678.181115] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 678.181115] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] nwinfo = self.network_api.allocate_for_instance( [ 678.181115] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 678.181115] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] created_port_ids = self._update_ports_for_instance( [ 678.181115] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 678.181115] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] with excutils.save_and_reraise_exception(): [ 678.181115] 
env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 678.181451] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] self.force_reraise() [ 678.181451] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 678.181451] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] raise self.value [ 678.181451] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 678.181451] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] updated_port = self._update_port( [ 678.181451] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 678.181451] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] _ensure_no_port_binding_failure(port) [ 678.181451] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 678.181451] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] raise exception.PortBindingFailed(port_id=port['id']) [ 678.181451] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] nova.exception.PortBindingFailed: Binding failed for port b80aeac8-416d-4020-9230-566a651c290a, please check neutron logs for more information. [ 678.181451] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] [ 678.181451] env[61629]: INFO nova.compute.manager [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Terminating instance [ 678.181759] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Build of instance 26366e41-de20-432b-a37e-5abb07c4ff8d was re-scheduled: Binding failed for port 15d38b73-42a2-4d0b-9550-84f7c88392ee, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 678.181759] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 678.181759] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquiring lock "refresh_cache-26366e41-de20-432b-a37e-5abb07c4ff8d" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 678.181887] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquired lock "refresh_cache-26366e41-de20-432b-a37e-5abb07c4ff8d" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.184023] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 678.184023] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.053s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.194234] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Acquiring lock "refresh_cache-c332c6fd-1edd-4d9e-85a9-32a408f9d05e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 678.336197] env[61629]: DEBUG oslo_concurrency.lockutils [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.679052] env[61629]: DEBUG nova.network.neutron [req-96432627-d9c4-4dbe-82f9-67d5605d3e7a req-4750075c-5453-488a-9595-5b0218b33a1c service nova] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 678.712387] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 678.836687] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.844178] env[61629]: DEBUG nova.network.neutron [req-96432627-d9c4-4dbe-82f9-67d5605d3e7a req-4750075c-5453-488a-9595-5b0218b33a1c service nova] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.111978] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c6239d-dfd0-4054-89e6-d0b986cc0bab {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.121210] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b651567-7498-4cf9-8598-fedde1abaa2a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.154306] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b4f25c9-08a9-4ee4-9b54-673ba6cedcdb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.161437] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33eef04c-91a0-4be9-9298-61665d4499e3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.180511] env[61629]: DEBUG nova.compute.provider_tree [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 679.373620] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Releasing lock "refresh_cache-26366e41-de20-432b-a37e-5abb07c4ff8d" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 679.373620] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 679.373620] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 679.373620] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 679.373620] env[61629]: DEBUG oslo_concurrency.lockutils [req-96432627-d9c4-4dbe-82f9-67d5605d3e7a req-4750075c-5453-488a-9595-5b0218b33a1c service nova] Releasing lock "refresh_cache-c332c6fd-1edd-4d9e-85a9-32a408f9d05e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 679.374897] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Acquired lock "refresh_cache-c332c6fd-1edd-4d9e-85a9-32a408f9d05e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.374897] env[61629]: DEBUG nova.network.neutron [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 679.374897] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.683202] env[61629]: DEBUG nova.scheduler.client.report [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 679.868110] env[61629]: DEBUG nova.network.neutron [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.870951] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.974960] env[61629]: DEBUG nova.network.neutron [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.191987] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.009s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.193257] env[61629]: ERROR nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 70469a0c-33ae-40da-a372-fd6053023643, please check neutron logs for more information. [ 680.193257] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Traceback (most recent call last): [ 680.193257] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 680.193257] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] self.driver.spawn(context, instance, image_meta, [ 680.193257] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 680.193257] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 680.193257] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 680.193257] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] vm_ref = self.build_virtual_machine(instance, [ 680.193257] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 680.193257] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] vif_infos = vmwarevif.get_vif_info(self._session, [ 680.193257] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 680.193808] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] for vif in network_info: [ 680.193808] 
env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 680.193808] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] return self._sync_wrapper(fn, *args, **kwargs) [ 680.193808] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 680.193808] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] self.wait() [ 680.193808] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 680.193808] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] self[:] = self._gt.wait() [ 680.193808] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 680.193808] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] return self._exit_event.wait() [ 680.193808] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 680.193808] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] current.throw(*self._exc) [ 680.193808] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 680.193808] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] result = function(*args, **kwargs) [ 680.194357] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 680.194357] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] return func(*args, **kwargs) [ 680.194357] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 680.194357] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] raise e [ 680.194357] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 680.194357] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] nwinfo = self.network_api.allocate_for_instance( [ 680.194357] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 680.194357] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] created_port_ids = self._update_ports_for_instance( [ 680.194357] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 680.194357] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] with excutils.save_and_reraise_exception(): [ 680.194357] env[61629]: ERROR nova.compute.manager 
[instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 680.194357] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] self.force_reraise() [ 680.194357] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 680.194887] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] raise self.value [ 680.194887] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 680.194887] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] updated_port = self._update_port( [ 680.194887] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 680.194887] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] _ensure_no_port_binding_failure(port) [ 680.194887] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 680.194887] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] raise exception.PortBindingFailed(port_id=port['id']) [ 680.194887] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] nova.exception.PortBindingFailed: Binding failed for port 70469a0c-33ae-40da-a372-fd6053023643, please check neutron logs for more information. [ 680.194887] env[61629]: ERROR nova.compute.manager [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] [ 680.194887] env[61629]: DEBUG nova.compute.utils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Binding failed for port 70469a0c-33ae-40da-a372-fd6053023643, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 680.195931] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 17.017s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.203402] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Build of instance 5670d64c-bddc-4b4a-bdf0-2b039be5e49e was re-scheduled: Binding failed for port 70469a0c-33ae-40da-a372-fd6053023643, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 680.203521] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 680.203835] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquiring lock "refresh_cache-5670d64c-bddc-4b4a-bdf0-2b039be5e49e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.203903] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquired lock "refresh_cache-5670d64c-bddc-4b4a-bdf0-2b039be5e49e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.204045] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 680.230186] env[61629]: DEBUG nova.compute.manager [req-a4a87c3f-f29e-4ef0-876a-93d91df0d9cf req-f245c8c7-ab26-4fbc-970c-d8044ca06ec2 service nova] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Received event network-vif-deleted-b80aeac8-416d-4020-9230-566a651c290a {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 680.325737] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Acquiring lock "355aa564-3067-4a3c-92de-4ab6e2b8fa6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 680.326019] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Lock "355aa564-3067-4a3c-92de-4ab6e2b8fa6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.374387] env[61629]: INFO nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 26366e41-de20-432b-a37e-5abb07c4ff8d] Took 1.03 seconds to deallocate network for instance. 
[ 680.478590] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Releasing lock "refresh_cache-c332c6fd-1edd-4d9e-85a9-32a408f9d05e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 680.479053] env[61629]: DEBUG nova.compute.manager [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 680.479256] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 680.479566] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-26f4adf3-b575-4e6d-bec3-f02b502dc2a9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.490639] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb394d6-d006-433d-ae0a-b60e29bddb84 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.518190] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c332c6fd-1edd-4d9e-85a9-32a408f9d05e could not be found. [ 680.518465] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 680.518659] env[61629]: INFO nova.compute.manager [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 680.518912] env[61629]: DEBUG oslo.service.loopingcall [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 680.519177] env[61629]: DEBUG nova.compute.manager [-] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 680.519252] env[61629]: DEBUG nova.network.neutron [-] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 680.539518] env[61629]: DEBUG nova.network.neutron [-] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 680.727689] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 680.870090] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.042463] env[61629]: DEBUG nova.network.neutron [-] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.239403] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance dd406dd1-0e19-400b-a862-ae51fd134017 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 681.239594] env[61629]: WARNING nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 2315bd37-6151-42d7-8b54-9ee367be0ed1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 681.375795] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Releasing lock "refresh_cache-5670d64c-bddc-4b4a-bdf0-2b039be5e49e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.375795] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 681.375795] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 681.375795] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 681.392528] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 681.410021] env[61629]: INFO nova.scheduler.client.report [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Deleted allocations for instance 26366e41-de20-432b-a37e-5abb07c4ff8d [ 681.548989] env[61629]: INFO nova.compute.manager [-] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Took 1.03 seconds to deallocate network for instance. [ 681.550537] env[61629]: DEBUG nova.compute.claims [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 681.550856] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.748274] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 26366e41-de20-432b-a37e-5abb07c4ff8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 681.898063] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.914976] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Lock "26366e41-de20-432b-a37e-5abb07c4ff8d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.856s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 682.252918] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 5670d64c-bddc-4b4a-bdf0-2b039be5e49e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 682.253096] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 67534b42-bfab-49a0-922d-8a79a13995db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 682.253237] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance cbcb5b42-06ab-41e4-ad08-d285b0863bfb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 682.253357] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 842633ee-19a5-44d6-bdef-c9f81e5af11e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 682.253471] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance c332c6fd-1edd-4d9e-85a9-32a408f9d05e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 682.402202] env[61629]: INFO nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 5670d64c-bddc-4b4a-bdf0-2b039be5e49e] Took 1.03 seconds to deallocate network for instance. 
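[editor's illustration, not part of the captured log] The oslo_concurrency.lockutils entries throughout this trace follow a fixed shape: an "Acquiring lock" line, an "acquired :: waited Xs" line once the caller gets the lock, and a "released :: held Xs" line on exit (for example the 14.521s hold on "compute_resources" by ResourceTracker._update_available_resource later in this trace). The sketch below is a minimal, hypothetical reimplementation of that timing/logging pattern using only the Python standard library; it is not the oslo.concurrency code, and the names timed_lock, _LOCKS and LOG are invented for illustration.

    import logging
    import threading
    import time
    from contextlib import contextmanager

    LOG = logging.getLogger(__name__)
    _LOCKS: dict[str, threading.Lock] = {}   # registry of named in-process locks

    @contextmanager
    def timed_lock(name: str, by: str):
        # Look up (or lazily create) the named lock.
        lock = _LOCKS.setdefault(name, threading.Lock())
        LOG.debug('Acquiring lock "%s" by "%s"', name, by)
        t0 = time.monotonic()
        lock.acquire()
        acquired = time.monotonic()
        LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                  name, by, acquired - t0)
        try:
            yield
        finally:
            lock.release()
            LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                      name, by, time.monotonic() - acquired)

    # Usage mimicking the resource-tracker pattern seen in this trace:
    # with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    #     ...perform the claim...

Long "waited" values in the surrounding entries (e.g. ~30s waits on "compute_resources") therefore indicate contention on the shared lock, not slow work inside the critical section.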
[ 682.417192] env[61629]: DEBUG nova.compute.manager [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 682.756572] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 374062de-1242-44bd-b658-e8976f8c3b6c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 682.939298] env[61629]: DEBUG oslo_concurrency.lockutils [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.259804] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 683.440442] env[61629]: INFO nova.scheduler.client.report [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Deleted allocations for instance 5670d64c-bddc-4b4a-bdf0-2b039be5e49e [ 683.763215] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 18b4e8c7-3517-46b2-b0a1-8d17bb222874 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 683.954516] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Lock "5670d64c-bddc-4b4a-bdf0-2b039be5e49e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.853s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 684.266547] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 54e03464-0f37-4f4d-8746-821e73da0541 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 684.328560] env[61629]: DEBUG oslo_concurrency.lockutils [None req-fd2931df-e68c-4aec-840c-d4ca1599c4af tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquiring lock "67534b42-bfab-49a0-922d-8a79a13995db" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.457422] env[61629]: DEBUG nova.compute.manager [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 684.776393] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 71a5a130-fd26-4cf5-9b27-520f9eb62c55 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 684.977991] env[61629]: DEBUG oslo_concurrency.lockutils [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 685.280042] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 6dd1097f-7353-4938-be2b-51c248e45fe2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 685.335599] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Acquiring lock "28af8dc5-0817-43e7-bce0-3491971efb0c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 685.335974] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Lock "28af8dc5-0817-43e7-bce0-3491971efb0c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 685.783698] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 39f7c5ee-7d07-4516-b008-40d5778cf139 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 686.288679] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 05b868fd-401e-48b7-928f-a39c002bbe71 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 686.792237] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 3a804973-af62-4de1-a4ee-5943209c5884 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.294924] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 012e6d9c-0f02-4761-9639-9a8e8972ea2b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.799249] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance ad374170-21a1-4036-9804-b82493701abf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 688.302435] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 688.805998] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 443ad254-3d5d-4fb8-a565-ce70c352e3f2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 689.309553] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance d013c1e1-952a-4b76-a44d-8499f5159c42 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 689.812391] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance f5830e36-257a-418a-add6-01195bb7d103 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 690.317604] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 52816a66-442f-4869-aee3-0cebd6f5e9bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 690.821077] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance fa8a181b-2170-4c38-98d6-adc4e5a80f94 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.324751] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 853f3cd8-c874-45e8-9e89-ee897dea87a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.828027] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 3b7866fb-213a-46a7-b31c-4ce5598591c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 692.331085] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance c3724b2e-4f6b-4db5-a68f-41e410e561e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 692.835115] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance fe6adbf6-be78-45ee-a136-b7e538fb124b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 693.337316] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 355aa564-3067-4a3c-92de-4ab6e2b8fa6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 693.337447] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61629) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 693.337520] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61629) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 693.647605] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac5b5427-40e0-448b-a639-ef8805c316b9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.655149] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b4f924-c7b4-4964-aa83-6103e0ae0241 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.684459] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eedbb7fd-56b5-4818-9b6b-769a47ac13df {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.691787] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e45cdb6-de69-43d2-80be-815b34f61538 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.704698] env[61629]: DEBUG nova.compute.provider_tree [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 694.212271] env[61629]: DEBUG nova.scheduler.client.report [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 694.716517] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61629) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 694.716779] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 14.521s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 694.717068] env[61629]: DEBUG oslo_concurrency.lockutils [None 
req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 30.087s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 694.717251] env[61629]: DEBUG nova.objects.instance [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61629) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 695.726440] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0f754723-1667-4050-9fdc-108d28d75b74 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.727604] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 31.038s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.556647] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95719fd0-5359-47c6-b608-a08e5d9ddef4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.563585] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e126be04-4ecb-4d48-851e-19e777eb582f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.594259] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e3e049-b7ac-4d5e-af10-19115398c3bf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.601360] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c991680c-c3b0-45d8-9944-ce02ecf9d2d2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.614072] env[61629]: DEBUG nova.compute.provider_tree [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 697.117815] env[61629]: DEBUG nova.scheduler.client.report [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 697.625672] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.898s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.626361] env[61629]: ERROR nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ba3af36d-c7aa-42c6-8578-0a1e6010675e, please check neutron logs for more information. [ 697.626361] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Traceback (most recent call last): [ 697.626361] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 697.626361] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] self.driver.spawn(context, instance, image_meta, [ 697.626361] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 697.626361] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] self._vmops.spawn(context, instance, image_meta, injected_files, [ 697.626361] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 697.626361] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] vm_ref = self.build_virtual_machine(instance, [ 697.626361] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 697.626361] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] vif_infos = vmwarevif.get_vif_info(self._session, [ 697.626361] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 697.626663] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] for vif in network_info: [ 697.626663] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 697.626663] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] return self._sync_wrapper(fn, *args, **kwargs) [ 697.626663] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 697.626663] env[61629]: ERROR nova.compute.manager [instance: 
67534b42-bfab-49a0-922d-8a79a13995db] self.wait() [ 697.626663] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 697.626663] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] self[:] = self._gt.wait() [ 697.626663] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 697.626663] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] return self._exit_event.wait() [ 697.626663] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 697.626663] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] current.throw(*self._exc) [ 697.626663] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 697.626663] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] result = function(*args, **kwargs) [ 697.626976] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 697.626976] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] return func(*args, **kwargs) [ 697.626976] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 697.626976] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] raise e [ 697.626976] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 697.626976] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] nwinfo = self.network_api.allocate_for_instance( [ 697.626976] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 697.626976] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] created_port_ids = self._update_ports_for_instance( [ 697.626976] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 697.626976] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] with excutils.save_and_reraise_exception(): [ 697.626976] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.626976] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] self.force_reraise() [ 697.626976] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.627325] env[61629]: ERROR nova.compute.manager 
[instance: 67534b42-bfab-49a0-922d-8a79a13995db] raise self.value [ 697.627325] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 697.627325] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] updated_port = self._update_port( [ 697.627325] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.627325] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] _ensure_no_port_binding_failure(port) [ 697.627325] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 697.627325] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] raise exception.PortBindingFailed(port_id=port['id']) [ 697.627325] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] nova.exception.PortBindingFailed: Binding failed for port ba3af36d-c7aa-42c6-8578-0a1e6010675e, please check neutron logs for more information. [ 697.627325] env[61629]: ERROR nova.compute.manager [instance: 67534b42-bfab-49a0-922d-8a79a13995db] [ 697.627325] env[61629]: DEBUG nova.compute.utils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Binding failed for port ba3af36d-c7aa-42c6-8578-0a1e6010675e, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 697.630053] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 29.543s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.631280] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Build of instance 67534b42-bfab-49a0-922d-8a79a13995db was re-scheduled: Binding failed for port ba3af36d-c7aa-42c6-8578-0a1e6010675e, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 697.631702] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 697.631927] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquiring lock "refresh_cache-67534b42-bfab-49a0-922d-8a79a13995db" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.632088] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquired lock "refresh_cache-67534b42-bfab-49a0-922d-8a79a13995db" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.632248] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 698.152748] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 698.253742] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.471019] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdebe414-36bb-43c6-8920-025674a3a3dc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.477134] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23f57c4-9368-410c-b4c2-7f94eb2c732b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.507355] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c3f9fa-f843-4c61-8e75-407ef65566f2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.514051] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a322e8-f2be-4e47-a9b0-3b41012c6584 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.526561] env[61629]: DEBUG nova.compute.provider_tree [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 698.757679] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Releasing lock "refresh_cache-67534b42-bfab-49a0-922d-8a79a13995db" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 698.757923] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 698.758122] env[61629]: DEBUG nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 698.758295] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 698.773076] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 699.029402] env[61629]: DEBUG nova.scheduler.client.report [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 699.275511] env[61629]: DEBUG nova.network.neutron [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.534718] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.906s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.535446] env[61629]: ERROR nova.compute.manager [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b213872d-112d-4e10-92df-607c1c61e42c, please check neutron logs for more information. 
[ 699.535446] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Traceback (most recent call last): [ 699.535446] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 699.535446] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] self.driver.spawn(context, instance, image_meta, [ 699.535446] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 699.535446] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 699.535446] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 699.535446] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] vm_ref = self.build_virtual_machine(instance, [ 699.535446] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 699.535446] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] vif_infos = vmwarevif.get_vif_info(self._session, [ 699.535446] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 699.535781] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] for vif in network_info: [ 699.535781] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 699.535781] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] return self._sync_wrapper(fn, *args, **kwargs) [ 699.535781] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 699.535781] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] self.wait() [ 699.535781] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 699.535781] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] self[:] = self._gt.wait() [ 699.535781] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 699.535781] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] return self._exit_event.wait() [ 699.535781] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 699.535781] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] current.throw(*self._exc) [ 699.535781] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
699.535781] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] result = function(*args, **kwargs) [ 699.536172] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 699.536172] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] return func(*args, **kwargs) [ 699.536172] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 699.536172] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] raise e [ 699.536172] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 699.536172] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] nwinfo = self.network_api.allocate_for_instance( [ 699.536172] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 699.536172] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] created_port_ids = self._update_ports_for_instance( [ 699.536172] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 699.536172] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] with excutils.save_and_reraise_exception(): [ 699.536172] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 699.536172] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] self.force_reraise() [ 699.536172] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 699.536542] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] raise self.value [ 699.536542] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 699.536542] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] updated_port = self._update_port( [ 699.536542] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 699.536542] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] _ensure_no_port_binding_failure(port) [ 699.536542] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 699.536542] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] raise exception.PortBindingFailed(port_id=port['id']) [ 699.536542] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] nova.exception.PortBindingFailed: Binding failed for 
port b213872d-112d-4e10-92df-607c1c61e42c, please check neutron logs for more information. [ 699.536542] env[61629]: ERROR nova.compute.manager [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] [ 699.536542] env[61629]: DEBUG nova.compute.utils [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Binding failed for port b213872d-112d-4e10-92df-607c1c61e42c, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 699.537426] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.744s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.537596] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.539693] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.722s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.541184] env[61629]: INFO nova.compute.claims [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 699.545869] env[61629]: DEBUG nova.compute.manager [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Build of instance cbcb5b42-06ab-41e4-ad08-d285b0863bfb was re-scheduled: Binding failed for port b213872d-112d-4e10-92df-607c1c61e42c, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 699.545869] env[61629]: DEBUG nova.compute.manager [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 699.545869] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "refresh_cache-cbcb5b42-06ab-41e4-ad08-d285b0863bfb" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 699.545869] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquired lock "refresh_cache-cbcb5b42-06ab-41e4-ad08-d285b0863bfb" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.545869] env[61629]: DEBUG nova.network.neutron [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 699.562526] env[61629]: INFO nova.scheduler.client.report [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Deleted allocations for instance 2315bd37-6151-42d7-8b54-9ee367be0ed1 [ 699.778625] env[61629]: INFO nova.compute.manager [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Took 1.02 seconds to deallocate network for instance. [ 700.064559] env[61629]: DEBUG nova.network.neutron [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 700.070294] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e93ad8bc-9c8a-4045-a4d6-41875ca83b05 tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lock "2315bd37-6151-42d7-8b54-9ee367be0ed1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.993s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 700.157492] env[61629]: DEBUG nova.network.neutron [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.659204] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Releasing lock "refresh_cache-cbcb5b42-06ab-41e4-ad08-d285b0863bfb" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.659413] env[61629]: DEBUG nova.compute.manager [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 700.659586] env[61629]: DEBUG nova.compute.manager [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 700.659752] env[61629]: DEBUG nova.network.neutron [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 700.674612] env[61629]: DEBUG nova.network.neutron [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 700.806102] env[61629]: INFO nova.scheduler.client.report [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Deleted allocations for instance 67534b42-bfab-49a0-922d-8a79a13995db [ 700.882031] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e4f4b5-2e34-46a5-9bf6-6f0e93f290f4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.891204] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99aaa703-4f66-4c39-b710-6e2d1113fc96 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.921031] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510eae1c-c488-47e6-8803-3dcb65aa6076 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.927566] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1642a903-9f2f-4c4e-97f9-370d23521b05 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.933851] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquiring lock "dd406dd1-0e19-400b-a862-ae51fd134017" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.934109] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lock "dd406dd1-0e19-400b-a862-ae51fd134017" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 700.934304] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquiring lock "dd406dd1-0e19-400b-a862-ae51fd134017-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.934481] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lock "dd406dd1-0e19-400b-a862-ae51fd134017-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 700.934643] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lock "dd406dd1-0e19-400b-a862-ae51fd134017-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 700.948021] env[61629]: DEBUG nova.compute.provider_tree [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 700.948021] env[61629]: INFO nova.compute.manager [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Terminating instance [ 700.952087] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquiring lock "refresh_cache-dd406dd1-0e19-400b-a862-ae51fd134017" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 700.952246] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquired lock "refresh_cache-dd406dd1-0e19-400b-a862-ae51fd134017" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.952477] env[61629]: DEBUG nova.network.neutron [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 701.178925] env[61629]: DEBUG nova.network.neutron [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.320143] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8e433e88-0afb-49ff-aa76-7d3f3a870e56 tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Lock "67534b42-bfab-49a0-922d-8a79a13995db" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 127.195s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.324979] env[61629]: DEBUG oslo_concurrency.lockutils [None req-fd2931df-e68c-4aec-840c-d4ca1599c4af tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Lock "67534b42-bfab-49a0-922d-8a79a13995db" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 16.997s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 701.325237] env[61629]: DEBUG oslo_concurrency.lockutils [None req-fd2931df-e68c-4aec-840c-d4ca1599c4af tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquiring lock "67534b42-bfab-49a0-922d-8a79a13995db-events" 
by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.325442] env[61629]: DEBUG oslo_concurrency.lockutils [None req-fd2931df-e68c-4aec-840c-d4ca1599c4af tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Lock "67534b42-bfab-49a0-922d-8a79a13995db-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 701.325609] env[61629]: DEBUG oslo_concurrency.lockutils [None req-fd2931df-e68c-4aec-840c-d4ca1599c4af tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Lock "67534b42-bfab-49a0-922d-8a79a13995db-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.327850] env[61629]: INFO nova.compute.manager [None req-fd2931df-e68c-4aec-840c-d4ca1599c4af tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Terminating instance [ 701.332441] env[61629]: DEBUG oslo_concurrency.lockutils [None req-fd2931df-e68c-4aec-840c-d4ca1599c4af tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquiring lock "refresh_cache-67534b42-bfab-49a0-922d-8a79a13995db" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 701.332606] env[61629]: DEBUG oslo_concurrency.lockutils [None req-fd2931df-e68c-4aec-840c-d4ca1599c4af tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Acquired lock "refresh_cache-67534b42-bfab-49a0-922d-8a79a13995db" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.332770] env[61629]: DEBUG nova.network.neutron [None req-fd2931df-e68c-4aec-840c-d4ca1599c4af tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 701.451018] env[61629]: DEBUG nova.scheduler.client.report [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 701.478982] env[61629]: DEBUG nova.network.neutron [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: 
dd406dd1-0e19-400b-a862-ae51fd134017] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 701.567800] env[61629]: DEBUG nova.network.neutron [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.682022] env[61629]: INFO nova.compute.manager [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: cbcb5b42-06ab-41e4-ad08-d285b0863bfb] Took 1.02 seconds to deallocate network for instance. [ 701.828592] env[61629]: DEBUG nova.compute.manager [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 701.864667] env[61629]: DEBUG nova.network.neutron [None req-fd2931df-e68c-4aec-840c-d4ca1599c4af tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 701.955211] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.415s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.955754] env[61629]: DEBUG nova.compute.manager [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 701.958998] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.184s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 701.964712] env[61629]: INFO nova.compute.claims [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 702.008711] env[61629]: DEBUG nova.network.neutron [None req-fd2931df-e68c-4aec-840c-d4ca1599c4af tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.071963] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Releasing lock "refresh_cache-dd406dd1-0e19-400b-a862-ae51fd134017" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 702.072486] env[61629]: DEBUG nova.compute.manager [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 702.072690] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 702.073642] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8adc2c36-147b-4460-8aa0-9eb4c6740afb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.081766] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 702.082296] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b73dc73a-bee6-433b-baf5-8d34676fb99c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.089398] env[61629]: DEBUG oslo_vmware.api [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 702.089398] env[61629]: value = "task-1354053" [ 702.089398] env[61629]: _type = "Task" [ 702.089398] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.096778] env[61629]: DEBUG oslo_vmware.api [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354053, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.350980] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.467163] env[61629]: DEBUG nova.compute.utils [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 702.470635] env[61629]: DEBUG nova.compute.manager [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 702.470872] env[61629]: DEBUG nova.network.neutron [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 702.512099] env[61629]: DEBUG oslo_concurrency.lockutils [None req-fd2931df-e68c-4aec-840c-d4ca1599c4af tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Releasing lock "refresh_cache-67534b42-bfab-49a0-922d-8a79a13995db" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 702.513168] env[61629]: DEBUG nova.compute.manager [None req-fd2931df-e68c-4aec-840c-d4ca1599c4af tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 702.513168] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2931df-e68c-4aec-840c-d4ca1599c4af tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 702.513168] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-be2b3846-fd5f-465f-aa6c-304243d4a603 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.522322] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b4ff876-1bbf-48cf-b7a5-bb3355b36689 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.547853] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-fd2931df-e68c-4aec-840c-d4ca1599c4af tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 67534b42-bfab-49a0-922d-8a79a13995db could not be found. [ 702.548477] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2931df-e68c-4aec-840c-d4ca1599c4af tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 702.548477] env[61629]: INFO nova.compute.manager [None req-fd2931df-e68c-4aec-840c-d4ca1599c4af tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Took 0.04 seconds to destroy the instance on the hypervisor. [ 702.548477] env[61629]: DEBUG oslo.service.loopingcall [None req-fd2931df-e68c-4aec-840c-d4ca1599c4af tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 702.548733] env[61629]: DEBUG nova.compute.manager [-] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 702.548733] env[61629]: DEBUG nova.network.neutron [-] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 702.560114] env[61629]: DEBUG nova.policy [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bdffb48ef3e14d7994bb9709b1ce3987', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a35cec60cf464a1c9f8215dbc6403a84', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 702.571812] env[61629]: DEBUG nova.network.neutron [-] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 702.599814] env[61629]: DEBUG oslo_vmware.api [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354053, 'name': PowerOffVM_Task, 'duration_secs': 0.129295} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.600101] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 702.600547] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 702.600838] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da30bef3-7a3b-4afe-ba78-6408bb30e307 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.627060] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 702.627327] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 702.627460] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Deleting the datastore file [datastore2] dd406dd1-0e19-400b-a862-ae51fd134017 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 702.627997] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f97c846-58a3-47a5-8196-da1a6a00c1b1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.636551] env[61629]: DEBUG oslo_vmware.api [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for the task: (returnval){ [ 702.636551] env[61629]: value = "task-1354055" [ 702.636551] env[61629]: _type = "Task" [ 702.636551] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.643945] env[61629]: DEBUG oslo_vmware.api [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354055, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.741463] env[61629]: INFO nova.scheduler.client.report [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Deleted allocations for instance cbcb5b42-06ab-41e4-ad08-d285b0863bfb [ 702.975455] env[61629]: DEBUG nova.compute.manager [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 702.993307] env[61629]: DEBUG nova.network.neutron [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Successfully created port: 5795bb6a-f713-42e8-baca-d885d777dc14 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 703.074113] env[61629]: DEBUG nova.network.neutron [-] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.148978] env[61629]: DEBUG oslo_vmware.api [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Task: {'id': task-1354055, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090067} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.150584] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 703.150848] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 703.150974] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 703.151743] env[61629]: INFO nova.compute.manager [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Took 1.08 seconds to destroy the instance on the hypervisor. [ 703.151743] env[61629]: DEBUG oslo.service.loopingcall [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 703.154246] env[61629]: DEBUG nova.compute.manager [-] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 703.154385] env[61629]: DEBUG nova.network.neutron [-] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 703.186408] env[61629]: DEBUG nova.network.neutron [-] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 703.253532] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ed1c92bd-8ec5-4d26-8b04-03b2aa5463a2 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "cbcb5b42-06ab-41e4-ad08-d285b0863bfb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 128.697s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.340020] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140efb8f-a133-46b5-98d3-94628b89a6db {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.347418] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c36a438d-886f-4b68-bdbf-e1441c940334 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.380103] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0685c183-f7ad-48cb-ba54-95c52c5d8abc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.388082] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a587bca4-0d78-482b-bb96-dc1a0f4ac59d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.401282] env[61629]: DEBUG nova.compute.provider_tree [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.577954] env[61629]: INFO nova.compute.manager [-] [instance: 67534b42-bfab-49a0-922d-8a79a13995db] Took 1.03 seconds to deallocate network for instance. [ 703.688286] env[61629]: DEBUG nova.network.neutron [-] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.755844] env[61629]: DEBUG nova.compute.manager [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Starting instance... 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 703.904260] env[61629]: DEBUG nova.scheduler.client.report [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 703.992116] env[61629]: DEBUG nova.compute.manager [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 704.027343] env[61629]: DEBUG nova.virt.hardware [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 704.027563] env[61629]: DEBUG nova.virt.hardware [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 704.027718] env[61629]: DEBUG nova.virt.hardware [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 704.027893] env[61629]: DEBUG nova.virt.hardware [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 704.028045] env[61629]: DEBUG nova.virt.hardware [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 704.028619] 
env[61629]: DEBUG nova.virt.hardware [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 704.028619] env[61629]: DEBUG nova.virt.hardware [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 704.028619] env[61629]: DEBUG nova.virt.hardware [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 704.028795] env[61629]: DEBUG nova.virt.hardware [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 704.028874] env[61629]: DEBUG nova.virt.hardware [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 704.029683] env[61629]: DEBUG nova.virt.hardware [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 704.029913] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe82766-a5bc-4f0a-9087-e8761e554a66 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.037727] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3d2d5ef-2018-4dd3-b29c-68de3b3856b5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.106648] env[61629]: DEBUG nova.compute.manager [req-04de9a12-89ff-4d94-861d-72bde514ee96 req-e7ea7096-97e3-4e31-842f-d0575a948cbd service nova] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Received event network-changed-5795bb6a-f713-42e8-baca-d885d777dc14 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 704.107794] env[61629]: DEBUG nova.compute.manager [req-04de9a12-89ff-4d94-861d-72bde514ee96 req-e7ea7096-97e3-4e31-842f-d0575a948cbd service nova] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Refreshing instance network info cache due to event network-changed-5795bb6a-f713-42e8-baca-d885d777dc14. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 704.107794] env[61629]: DEBUG oslo_concurrency.lockutils [req-04de9a12-89ff-4d94-861d-72bde514ee96 req-e7ea7096-97e3-4e31-842f-d0575a948cbd service nova] Acquiring lock "refresh_cache-374062de-1242-44bd-b658-e8976f8c3b6c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.107794] env[61629]: DEBUG oslo_concurrency.lockutils [req-04de9a12-89ff-4d94-861d-72bde514ee96 req-e7ea7096-97e3-4e31-842f-d0575a948cbd service nova] Acquired lock "refresh_cache-374062de-1242-44bd-b658-e8976f8c3b6c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.107794] env[61629]: DEBUG nova.network.neutron [req-04de9a12-89ff-4d94-861d-72bde514ee96 req-e7ea7096-97e3-4e31-842f-d0575a948cbd service nova] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Refreshing network info cache for port 5795bb6a-f713-42e8-baca-d885d777dc14 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 704.184598] env[61629]: ERROR nova.compute.manager [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5795bb6a-f713-42e8-baca-d885d777dc14, please check neutron logs for more information. [ 704.184598] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 704.184598] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 704.184598] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 704.184598] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 704.184598] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 704.184598] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 704.184598] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 704.184598] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.184598] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 704.184598] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.184598] env[61629]: ERROR nova.compute.manager raise self.value [ 704.184598] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 704.184598] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 704.184598] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 704.184598] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 704.185164] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 704.185164] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 704.185164] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port 5795bb6a-f713-42e8-baca-d885d777dc14, please check neutron logs for more information. [ 704.185164] env[61629]: ERROR nova.compute.manager [ 704.185164] env[61629]: Traceback (most recent call last): [ 704.185164] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 704.185164] env[61629]: listener.cb(fileno) [ 704.185164] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 704.185164] env[61629]: result = function(*args, **kwargs) [ 704.185164] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 704.185164] env[61629]: return func(*args, **kwargs) [ 704.185164] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 704.185164] env[61629]: raise e [ 704.185164] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 704.185164] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 704.185164] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 704.185164] env[61629]: created_port_ids = self._update_ports_for_instance( [ 704.185164] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 704.185164] env[61629]: with excutils.save_and_reraise_exception(): [ 704.185164] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.185164] env[61629]: self.force_reraise() [ 704.185164] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.185164] env[61629]: raise self.value [ 704.185164] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 704.185164] env[61629]: updated_port = self._update_port( [ 704.185164] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 704.185164] env[61629]: _ensure_no_port_binding_failure(port) [ 704.185164] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 704.185164] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 704.186022] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 5795bb6a-f713-42e8-baca-d885d777dc14, please check neutron logs for more information. [ 704.186022] env[61629]: Removing descriptor: 21 [ 704.186022] env[61629]: ERROR nova.compute.manager [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5795bb6a-f713-42e8-baca-d885d777dc14, please check neutron logs for more information. 
[ 704.186022] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Traceback (most recent call last): [ 704.186022] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 704.186022] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] yield resources [ 704.186022] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 704.186022] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] self.driver.spawn(context, instance, image_meta, [ 704.186022] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 704.186022] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 704.186022] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 704.186022] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] vm_ref = self.build_virtual_machine(instance, [ 704.186388] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 704.186388] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] vif_infos = vmwarevif.get_vif_info(self._session, [ 704.186388] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 704.186388] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] for vif in network_info: [ 704.186388] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 704.186388] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] return self._sync_wrapper(fn, *args, **kwargs) [ 704.186388] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 704.186388] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] self.wait() [ 704.186388] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 704.186388] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] self[:] = self._gt.wait() [ 704.186388] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 704.186388] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] return self._exit_event.wait() [ 704.186388] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 704.186855] env[61629]: ERROR 
nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] result = hub.switch() [ 704.186855] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 704.186855] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] return self.greenlet.switch() [ 704.186855] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 704.186855] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] result = function(*args, **kwargs) [ 704.186855] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 704.186855] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] return func(*args, **kwargs) [ 704.186855] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 704.186855] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] raise e [ 704.186855] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 704.186855] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] nwinfo = self.network_api.allocate_for_instance( [ 704.186855] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 704.186855] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] created_port_ids = self._update_ports_for_instance( [ 704.187593] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 704.187593] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] with excutils.save_and_reraise_exception(): [ 704.187593] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.187593] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] self.force_reraise() [ 704.187593] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.187593] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] raise self.value [ 704.187593] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 704.187593] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] updated_port = self._update_port( [ 704.187593] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 704.187593] 
env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] _ensure_no_port_binding_failure(port) [ 704.187593] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 704.187593] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] raise exception.PortBindingFailed(port_id=port['id']) [ 704.188296] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] nova.exception.PortBindingFailed: Binding failed for port 5795bb6a-f713-42e8-baca-d885d777dc14, please check neutron logs for more information. [ 704.188296] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] [ 704.188296] env[61629]: INFO nova.compute.manager [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Terminating instance [ 704.188296] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquiring lock "refresh_cache-374062de-1242-44bd-b658-e8976f8c3b6c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.191191] env[61629]: INFO nova.compute.manager [-] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Took 1.04 seconds to deallocate network for instance. [ 704.277881] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.410402] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.451s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.410951] env[61629]: DEBUG nova.compute.manager [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 704.413702] env[61629]: DEBUG oslo_concurrency.lockutils [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 29.692s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.465259] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "c5c6854c-1fe6-46e7-aee7-6a5e00d6027c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.465491] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "c5c6854c-1fe6-46e7-aee7-6a5e00d6027c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.606026] env[61629]: DEBUG oslo_concurrency.lockutils [None req-fd2931df-e68c-4aec-840c-d4ca1599c4af tempest-ListServersNegativeTestJSON-68607468 tempest-ListServersNegativeTestJSON-68607468-project-member] Lock "67534b42-bfab-49a0-922d-8a79a13995db" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.281s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.624190] env[61629]: DEBUG nova.network.neutron [req-04de9a12-89ff-4d94-861d-72bde514ee96 req-e7ea7096-97e3-4e31-842f-d0575a948cbd service nova] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 704.697532] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.715161] env[61629]: DEBUG nova.network.neutron [req-04de9a12-89ff-4d94-861d-72bde514ee96 req-e7ea7096-97e3-4e31-842f-d0575a948cbd service nova] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.918247] env[61629]: DEBUG nova.compute.utils [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 704.924136] env[61629]: DEBUG nova.compute.manager [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 704.924194] env[61629]: DEBUG nova.network.neutron [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 704.985557] env[61629]: DEBUG nova.policy [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a214e72fbab540c1999dcabf95626cc1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ae7d7eafdec947e182449d92a6afa07e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 705.217755] env[61629]: DEBUG oslo_concurrency.lockutils [req-04de9a12-89ff-4d94-861d-72bde514ee96 req-e7ea7096-97e3-4e31-842f-d0575a948cbd service nova] Releasing lock "refresh_cache-374062de-1242-44bd-b658-e8976f8c3b6c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.218173] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquired lock "refresh_cache-374062de-1242-44bd-b658-e8976f8c3b6c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.218347] env[61629]: DEBUG nova.network.neutron [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 
374062de-1242-44bd-b658-e8976f8c3b6c] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 705.294652] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21570621-2ca4-4940-9bb2-86d0cda39156 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.303295] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-431bcb3b-28e6-4da8-baed-682158944e15 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.335026] env[61629]: DEBUG nova.network.neutron [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Successfully created port: 61771b60-a29f-4695-b630-b1cc0dd7ad27 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 705.337400] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8de702fd-90cd-417e-bc6b-39316597f29c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.345780] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be62350-be6f-4811-9d0a-68902ca79465 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.359167] env[61629]: DEBUG nova.compute.provider_tree [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 705.424496] env[61629]: DEBUG nova.compute.manager [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 705.736520] env[61629]: DEBUG nova.network.neutron [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 705.844701] env[61629]: DEBUG nova.network.neutron [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.862609] env[61629]: DEBUG nova.scheduler.client.report [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 706.135783] env[61629]: DEBUG nova.compute.manager [req-0c5141ea-780f-40eb-9a0f-c1fb01571ea8 req-b2ba5926-1ec1-484b-bd68-bc90779ad4e3 service nova] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Received event network-vif-deleted-5795bb6a-f713-42e8-baca-d885d777dc14 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 706.349296] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Releasing lock "refresh_cache-374062de-1242-44bd-b658-e8976f8c3b6c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 706.349943] env[61629]: DEBUG nova.compute.manager [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 706.349943] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 706.353294] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1226d447-cab8-47b1-8ce8-b7cdcf512d4a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.361495] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-944889d7-d006-422e-acea-02442eee14f8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.374136] env[61629]: DEBUG oslo_concurrency.lockutils [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.959s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.374136] env[61629]: ERROR nova.compute.manager [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6553eeaf-691c-4f78-b738-44600d6fd47f, please check neutron logs for more information. 
[ 706.374136] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Traceback (most recent call last): [ 706.374136] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 706.374136] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] self.driver.spawn(context, instance, image_meta, [ 706.374136] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 706.374136] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 706.374136] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 706.374136] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] vm_ref = self.build_virtual_machine(instance, [ 706.374378] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 706.374378] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] vif_infos = vmwarevif.get_vif_info(self._session, [ 706.374378] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 706.374378] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] for vif in network_info: [ 706.374378] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 706.374378] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] return self._sync_wrapper(fn, *args, **kwargs) [ 706.374378] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 706.374378] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] self.wait() [ 706.374378] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 706.374378] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] self[:] = self._gt.wait() [ 706.374378] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 706.374378] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] return self._exit_event.wait() [ 706.374378] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 706.374690] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] current.throw(*self._exc) [ 706.374690] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
706.374690] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] result = function(*args, **kwargs) [ 706.374690] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 706.374690] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] return func(*args, **kwargs) [ 706.374690] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 706.374690] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] raise e [ 706.374690] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 706.374690] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] nwinfo = self.network_api.allocate_for_instance( [ 706.374690] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 706.374690] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] created_port_ids = self._update_ports_for_instance( [ 706.374690] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 706.374690] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] with excutils.save_and_reraise_exception(): [ 706.375041] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 706.375041] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] self.force_reraise() [ 706.375041] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 706.375041] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] raise self.value [ 706.375041] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 706.375041] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] updated_port = self._update_port( [ 706.375041] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 706.375041] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] _ensure_no_port_binding_failure(port) [ 706.375041] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 706.375041] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] raise exception.PortBindingFailed(port_id=port['id']) [ 706.375041] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] nova.exception.PortBindingFailed: Binding failed for 
port 6553eeaf-691c-4f78-b738-44600d6fd47f, please check neutron logs for more information. [ 706.375041] env[61629]: ERROR nova.compute.manager [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] [ 706.375340] env[61629]: DEBUG nova.compute.utils [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Binding failed for port 6553eeaf-691c-4f78-b738-44600d6fd47f, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 706.376032] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.022s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.376978] env[61629]: INFO nova.compute.claims [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 706.379982] env[61629]: DEBUG nova.compute.manager [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Build of instance 842633ee-19a5-44d6-bdef-c9f81e5af11e was re-scheduled: Binding failed for port 6553eeaf-691c-4f78-b738-44600d6fd47f, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 706.380384] env[61629]: DEBUG nova.compute.manager [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 706.380650] env[61629]: DEBUG oslo_concurrency.lockutils [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Acquiring lock "refresh_cache-842633ee-19a5-44d6-bdef-c9f81e5af11e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.380764] env[61629]: DEBUG oslo_concurrency.lockutils [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Acquired lock "refresh_cache-842633ee-19a5-44d6-bdef-c9f81e5af11e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.381046] env[61629]: DEBUG nova.network.neutron [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 706.390608] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 374062de-1242-44bd-b658-e8976f8c3b6c could not be found. [ 706.390827] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 706.391031] env[61629]: INFO nova.compute.manager [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 706.391294] env[61629]: DEBUG oslo.service.loopingcall [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 706.392137] env[61629]: DEBUG nova.compute.manager [-] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 706.392230] env[61629]: DEBUG nova.network.neutron [-] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 706.415899] env[61629]: DEBUG nova.network.neutron [-] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 706.435536] env[61629]: DEBUG nova.compute.manager [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 706.463826] env[61629]: DEBUG nova.virt.hardware [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 706.464066] env[61629]: DEBUG nova.virt.hardware [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 706.464221] env[61629]: DEBUG nova.virt.hardware [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 706.464393] env[61629]: DEBUG nova.virt.hardware [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 706.464539] env[61629]: DEBUG nova.virt.hardware [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 706.464680] env[61629]: DEBUG nova.virt.hardware [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 
tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 706.464877] env[61629]: DEBUG nova.virt.hardware [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 706.465057] env[61629]: DEBUG nova.virt.hardware [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 706.465232] env[61629]: DEBUG nova.virt.hardware [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 706.465386] env[61629]: DEBUG nova.virt.hardware [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 706.465551] env[61629]: DEBUG nova.virt.hardware [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 706.466564] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446fb024-a9c9-4633-b006-d45ee0fcc7ce {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.474489] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fac466e-17a1-4ee4-a46e-8fce035b3f38 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.479599] env[61629]: ERROR nova.compute.manager [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 61771b60-a29f-4695-b630-b1cc0dd7ad27, please check neutron logs for more information. 
[ 706.479599] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 706.479599] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 706.479599] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 706.479599] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 706.479599] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 706.479599] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 706.479599] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 706.479599] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 706.479599] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 706.479599] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 706.479599] env[61629]: ERROR nova.compute.manager raise self.value [ 706.479599] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 706.479599] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 706.479599] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 706.479599] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 706.480146] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 706.480146] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 706.480146] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 61771b60-a29f-4695-b630-b1cc0dd7ad27, please check neutron logs for more information. 
[ 706.480146] env[61629]: ERROR nova.compute.manager [ 706.480146] env[61629]: Traceback (most recent call last): [ 706.480146] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 706.480146] env[61629]: listener.cb(fileno) [ 706.480146] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 706.480146] env[61629]: result = function(*args, **kwargs) [ 706.480146] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 706.480146] env[61629]: return func(*args, **kwargs) [ 706.480146] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 706.480146] env[61629]: raise e [ 706.480146] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 706.480146] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 706.480146] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 706.480146] env[61629]: created_port_ids = self._update_ports_for_instance( [ 706.480146] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 706.480146] env[61629]: with excutils.save_and_reraise_exception(): [ 706.480146] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 706.480146] env[61629]: self.force_reraise() [ 706.480146] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 706.480146] env[61629]: raise self.value [ 706.480146] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 706.480146] env[61629]: updated_port = self._update_port( [ 706.480146] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 706.480146] env[61629]: _ensure_no_port_binding_failure(port) [ 706.480146] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 706.480146] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 706.480957] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 61771b60-a29f-4695-b630-b1cc0dd7ad27, please check neutron logs for more information. [ 706.480957] env[61629]: Removing descriptor: 21 [ 706.491233] env[61629]: ERROR nova.compute.manager [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 61771b60-a29f-4695-b630-b1cc0dd7ad27, please check neutron logs for more information. 
[ 706.491233] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Traceback (most recent call last): [ 706.491233] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 706.491233] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] yield resources [ 706.491233] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 706.491233] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] self.driver.spawn(context, instance, image_meta, [ 706.491233] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 706.491233] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 706.491233] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 706.491233] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] vm_ref = self.build_virtual_machine(instance, [ 706.491233] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 706.491601] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] vif_infos = vmwarevif.get_vif_info(self._session, [ 706.491601] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 706.491601] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] for vif in network_info: [ 706.491601] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 706.491601] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] return self._sync_wrapper(fn, *args, **kwargs) [ 706.491601] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 706.491601] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] self.wait() [ 706.491601] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 706.491601] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] self[:] = self._gt.wait() [ 706.491601] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 706.491601] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] return self._exit_event.wait() [ 706.491601] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 706.491601] env[61629]: ERROR 
nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] current.throw(*self._exc) [ 706.491998] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 706.491998] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] result = function(*args, **kwargs) [ 706.491998] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 706.491998] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] return func(*args, **kwargs) [ 706.491998] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 706.491998] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] raise e [ 706.491998] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 706.491998] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] nwinfo = self.network_api.allocate_for_instance( [ 706.491998] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 706.491998] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] created_port_ids = self._update_ports_for_instance( [ 706.491998] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 706.491998] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] with excutils.save_and_reraise_exception(): [ 706.491998] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 706.492370] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] self.force_reraise() [ 706.492370] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 706.492370] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] raise self.value [ 706.492370] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 706.492370] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] updated_port = self._update_port( [ 706.492370] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 706.492370] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] _ensure_no_port_binding_failure(port) [ 706.492370] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
706.492370] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] raise exception.PortBindingFailed(port_id=port['id']) [ 706.492370] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] nova.exception.PortBindingFailed: Binding failed for port 61771b60-a29f-4695-b630-b1cc0dd7ad27, please check neutron logs for more information. [ 706.492370] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] [ 706.492370] env[61629]: INFO nova.compute.manager [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Terminating instance [ 706.493586] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Acquiring lock "refresh_cache-395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.493748] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Acquired lock "refresh_cache-395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.493910] env[61629]: DEBUG nova.network.neutron [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 706.899251] env[61629]: DEBUG nova.network.neutron [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 706.918636] env[61629]: DEBUG nova.network.neutron [-] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.979693] env[61629]: DEBUG nova.network.neutron [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.008874] env[61629]: DEBUG nova.network.neutron [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 707.076935] env[61629]: DEBUG nova.network.neutron [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.421714] env[61629]: INFO nova.compute.manager [-] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Took 1.03 seconds to deallocate network for instance. [ 707.426960] env[61629]: DEBUG nova.compute.claims [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 707.427237] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.483431] env[61629]: DEBUG oslo_concurrency.lockutils [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Releasing lock "refresh_cache-842633ee-19a5-44d6-bdef-c9f81e5af11e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.483654] env[61629]: DEBUG nova.compute.manager [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 707.483832] env[61629]: DEBUG nova.compute.manager [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 707.485029] env[61629]: DEBUG nova.network.neutron [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 707.499265] env[61629]: DEBUG nova.network.neutron [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 707.580864] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Releasing lock "refresh_cache-395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.581335] env[61629]: DEBUG nova.compute.manager [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 707.581556] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 707.583952] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-183beb4b-16a0-4f4b-94c7-be10b1a6ce10 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.593205] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b12ffd82-ac04-48f5-9e37-ddbec2e101d9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.616676] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa could not be found. [ 707.616903] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 707.617095] env[61629]: INFO nova.compute.manager [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Took 0.04 seconds to destroy the instance on the hypervisor. [ 707.617336] env[61629]: DEBUG oslo.service.loopingcall [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 707.619619] env[61629]: DEBUG nova.compute.manager [-] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 707.619723] env[61629]: DEBUG nova.network.neutron [-] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 707.637071] env[61629]: DEBUG nova.network.neutron [-] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 707.729328] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b3e24b-ed90-4d0a-ae02-489ebebcb1b9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.737024] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c673b7f6-2a5c-4ca0-bec3-8c1964e3b5a4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.767739] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ac4707-c819-4334-a293-5e858272e8ad {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.776185] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2159924-c601-40d5-b844-65503027441b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.788880] env[61629]: DEBUG nova.compute.provider_tree [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 708.002078] env[61629]: DEBUG nova.network.neutron [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.139461] env[61629]: DEBUG nova.network.neutron [-] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.170927] env[61629]: DEBUG nova.compute.manager [req-754de49f-47c4-4dd0-94c1-9c40909dbb16 req-7ee62346-b035-4c77-8cd7-a497f25714d4 service nova] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Received event network-changed-61771b60-a29f-4695-b630-b1cc0dd7ad27 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 708.171142] env[61629]: DEBUG nova.compute.manager [req-754de49f-47c4-4dd0-94c1-9c40909dbb16 req-7ee62346-b035-4c77-8cd7-a497f25714d4 service nova] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Refreshing instance network info cache due to event 
network-changed-61771b60-a29f-4695-b630-b1cc0dd7ad27. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 708.171351] env[61629]: DEBUG oslo_concurrency.lockutils [req-754de49f-47c4-4dd0-94c1-9c40909dbb16 req-7ee62346-b035-4c77-8cd7-a497f25714d4 service nova] Acquiring lock "refresh_cache-395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.171490] env[61629]: DEBUG oslo_concurrency.lockutils [req-754de49f-47c4-4dd0-94c1-9c40909dbb16 req-7ee62346-b035-4c77-8cd7-a497f25714d4 service nova] Acquired lock "refresh_cache-395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.171710] env[61629]: DEBUG nova.network.neutron [req-754de49f-47c4-4dd0-94c1-9c40909dbb16 req-7ee62346-b035-4c77-8cd7-a497f25714d4 service nova] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Refreshing network info cache for port 61771b60-a29f-4695-b630-b1cc0dd7ad27 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 708.292531] env[61629]: DEBUG nova.scheduler.client.report [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 708.505603] env[61629]: INFO nova.compute.manager [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] [instance: 842633ee-19a5-44d6-bdef-c9f81e5af11e] Took 1.02 seconds to deallocate network for instance. [ 708.641952] env[61629]: INFO nova.compute.manager [-] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Took 1.02 seconds to deallocate network for instance. [ 708.645255] env[61629]: DEBUG nova.compute.claims [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 708.645443] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.688359] env[61629]: DEBUG nova.network.neutron [req-754de49f-47c4-4dd0-94c1-9c40909dbb16 req-7ee62346-b035-4c77-8cd7-a497f25714d4 service nova] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 708.777314] env[61629]: DEBUG nova.network.neutron [req-754de49f-47c4-4dd0-94c1-9c40909dbb16 req-7ee62346-b035-4c77-8cd7-a497f25714d4 service nova] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.796924] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.421s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.797433] env[61629]: DEBUG nova.compute.manager [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 708.799811] env[61629]: DEBUG oslo_concurrency.lockutils [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.464s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.801285] env[61629]: INFO nova.compute.claims [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 709.279616] env[61629]: DEBUG oslo_concurrency.lockutils [req-754de49f-47c4-4dd0-94c1-9c40909dbb16 req-7ee62346-b035-4c77-8cd7-a497f25714d4 service nova] Releasing lock "refresh_cache-395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.279881] env[61629]: DEBUG nova.compute.manager [req-754de49f-47c4-4dd0-94c1-9c40909dbb16 req-7ee62346-b035-4c77-8cd7-a497f25714d4 service nova] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Received event network-vif-deleted-61771b60-a29f-4695-b630-b1cc0dd7ad27 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 709.307424] env[61629]: DEBUG nova.compute.utils [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 709.308658] env[61629]: DEBUG nova.compute.manager [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 709.308823] env[61629]: DEBUG nova.network.neutron [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 709.354707] env[61629]: DEBUG nova.policy [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '45ee1f3fcec646f8a9e9b9fafb6ec362', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2cb2918e9e904d34bf966e93e3a427f7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 709.533667] env[61629]: INFO nova.scheduler.client.report [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Deleted allocations for instance 842633ee-19a5-44d6-bdef-c9f81e5af11e [ 709.772139] env[61629]: DEBUG nova.network.neutron [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Successfully created port: 914c32fe-3e51-4b60-92d0-6d1abe056601 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 709.812276] env[61629]: DEBUG nova.compute.manager [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 710.045562] env[61629]: DEBUG oslo_concurrency.lockutils [None req-99ecb0c5-0fa9-4ff6-afc7-115101a9f21b tempest-InstanceActionsV221TestJSON-1257281624 tempest-InstanceActionsV221TestJSON-1257281624-project-member] Lock "842633ee-19a5-44d6-bdef-c9f81e5af11e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 135.117s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.179087] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e71866-96b1-40c1-b811-e9efe76b6214 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.188461] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a052193-e798-4387-ad83-2ffac9a8edd7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.219020] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a960802-00ba-47c7-8343-1715af32ad17 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.226308] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70d08fa-42d1-4f20-a9bb-2519fec824b6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.241721] env[61629]: DEBUG nova.compute.provider_tree [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 710.548337] env[61629]: DEBUG nova.compute.manager [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 710.614475] env[61629]: DEBUG nova.compute.manager [req-96b70aed-876d-4daf-8835-086d8862a2bb req-c7de0d2d-587c-47c2-a0b4-200b0e89833f service nova] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Received event network-changed-914c32fe-3e51-4b60-92d0-6d1abe056601 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 710.614773] env[61629]: DEBUG nova.compute.manager [req-96b70aed-876d-4daf-8835-086d8862a2bb req-c7de0d2d-587c-47c2-a0b4-200b0e89833f service nova] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Refreshing instance network info cache due to event network-changed-914c32fe-3e51-4b60-92d0-6d1abe056601. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 710.615205] env[61629]: DEBUG oslo_concurrency.lockutils [req-96b70aed-876d-4daf-8835-086d8862a2bb req-c7de0d2d-587c-47c2-a0b4-200b0e89833f service nova] Acquiring lock "refresh_cache-18b4e8c7-3517-46b2-b0a1-8d17bb222874" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 710.615395] env[61629]: DEBUG oslo_concurrency.lockutils [req-96b70aed-876d-4daf-8835-086d8862a2bb req-c7de0d2d-587c-47c2-a0b4-200b0e89833f service nova] Acquired lock "refresh_cache-18b4e8c7-3517-46b2-b0a1-8d17bb222874" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.615658] env[61629]: DEBUG nova.network.neutron [req-96b70aed-876d-4daf-8835-086d8862a2bb req-c7de0d2d-587c-47c2-a0b4-200b0e89833f service nova] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Refreshing network info cache for port 914c32fe-3e51-4b60-92d0-6d1abe056601 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 710.745328] env[61629]: DEBUG nova.scheduler.client.report [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 710.765738] env[61629]: ERROR nova.compute.manager [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 914c32fe-3e51-4b60-92d0-6d1abe056601, please check neutron logs for more information. 
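The PortBindingFailed above is raised at the end of the call chain shown in the traceback that follows: allocate_for_instance() -> _update_ports_for_instance() -> _update_port() -> _ensure_no_port_binding_failure(). A minimal illustrative reconstruction of that final check, in Python (the 'binding:vif_type' == 'binding_failed' condition is the usual trigger for this exception; the body below is a sketch, not a verbatim copy of nova/network/neutron.py):

    # Sketch only: assumes Neutron returns the port with
    # 'binding:vif_type' set to 'binding_failed' when binding fails.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # e.g. _ensure_no_port_binding_failure(
    #          {'id': '914c32fe-3e51-4b60-92d0-6d1abe056601',
    #           'binding:vif_type': 'binding_failed'})
    # raises PortBindingFailed with the message seen in this log.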
[ 710.765738] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 710.765738] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 710.765738] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 710.765738] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 710.765738] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 710.765738] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 710.765738] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 710.765738] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 710.765738] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 710.765738] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 710.765738] env[61629]: ERROR nova.compute.manager raise self.value [ 710.765738] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 710.765738] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 710.765738] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 710.765738] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 710.768237] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 710.768237] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 710.768237] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 914c32fe-3e51-4b60-92d0-6d1abe056601, please check neutron logs for more information. 
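The save_and_reraise_exception / force_reraise / "raise self.value" frames in the traceback above come from oslo.utils' excutils helper: it lets rollback code run inside an except block and then re-raises the original exception unchanged. A small self-contained usage example (the bind/rollback logic is made up for illustration and assumes oslo.utils is installed):

    from oslo_utils import excutils

    def bind(port):
        raise RuntimeError("binding failed for %s" % port)  # stand-in failure

    def update_ports(ports):
        created = []
        for port in ports:
            try:
                bind(port)
                created.append(port)
            except Exception:
                with excutils.save_and_reraise_exception():
                    # Runs while the original exception is saved; it is
                    # re-raised automatically when this block exits.
                    print("rolling back %d created port(s)" % len(created))

    # update_ports(['p1']) prints the rollback message, then re-raises RuntimeError.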
[ 710.768237] env[61629]: ERROR nova.compute.manager [ 710.768237] env[61629]: Traceback (most recent call last): [ 710.768237] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 710.768237] env[61629]: listener.cb(fileno) [ 710.768237] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 710.768237] env[61629]: result = function(*args, **kwargs) [ 710.768237] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 710.768237] env[61629]: return func(*args, **kwargs) [ 710.768237] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 710.768237] env[61629]: raise e [ 710.768237] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 710.768237] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 710.768237] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 710.768237] env[61629]: created_port_ids = self._update_ports_for_instance( [ 710.768237] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 710.768237] env[61629]: with excutils.save_and_reraise_exception(): [ 710.768237] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 710.768237] env[61629]: self.force_reraise() [ 710.768237] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 710.768237] env[61629]: raise self.value [ 710.768237] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 710.768237] env[61629]: updated_port = self._update_port( [ 710.768237] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 710.768237] env[61629]: _ensure_no_port_binding_failure(port) [ 710.768237] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 710.768237] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 710.769067] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 914c32fe-3e51-4b60-92d0-6d1abe056601, please check neutron logs for more information. [ 710.769067] env[61629]: Removing descriptor: 21 [ 710.824020] env[61629]: DEBUG nova.compute.manager [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 710.848066] env[61629]: DEBUG nova.virt.hardware [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 710.848333] env[61629]: DEBUG nova.virt.hardware [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 710.848491] env[61629]: DEBUG nova.virt.hardware [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 710.848667] env[61629]: DEBUG nova.virt.hardware [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 710.848812] env[61629]: DEBUG nova.virt.hardware [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 710.848953] env[61629]: DEBUG nova.virt.hardware [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 710.849369] env[61629]: DEBUG nova.virt.hardware [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 710.849547] env[61629]: DEBUG nova.virt.hardware [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 710.850178] env[61629]: DEBUG nova.virt.hardware [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 710.850357] env[61629]: DEBUG nova.virt.hardware [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 710.850559] env[61629]: DEBUG nova.virt.hardware [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 710.851452] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a4e3fd-1fd6-4dfc-a326-dd1ae31f7306 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.860860] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c076ba-fd6a-4fc5-b0a7-8cabb7a31356 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.874892] env[61629]: ERROR nova.compute.manager [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 914c32fe-3e51-4b60-92d0-6d1abe056601, please check neutron logs for more information. 
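Note that the spawn only trips over the PortBindingFailed when it iterates network_info (get_vif_info -> model.py __iter__ -> _sync_wrapper -> wait in the traceback below): IP allocation was started in the background earlier ("Allocating IP information in the background.") and the stored error is re-raised at first use. A rough analogy of that wrapper using concurrent.futures instead of eventlet (class and function names here are illustrative, not Nova's):

    import concurrent.futures

    class AsyncNetworkInfo:
        # Rough analogy of the async network_info wrapper.
        def __init__(self, allocate_fn, *args, **kwargs):
            pool = concurrent.futures.ThreadPoolExecutor(max_workers=1)
            self._future = pool.submit(allocate_fn, *args, **kwargs)

        def __iter__(self):
            # The first consumer blocks here until allocation finishes;
            # an allocation error (e.g. PortBindingFailed) is re-raised
            # at this point, mirroring _sync_wrapper()/wait().
            return iter(self._future.result())

    def allocate(instance_uuid):
        raise RuntimeError("Binding failed for port ...")  # stand-in failure

    # nw_info = AsyncNetworkInfo(allocate, '18b4e8c7-3517-46b2-b0a1-8d17bb222874')
    # for vif in nw_info:   # the error surfaces here, as in get_vif_info()
    #     pass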
[ 710.874892] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Traceback (most recent call last): [ 710.874892] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 710.874892] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] yield resources [ 710.874892] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 710.874892] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] self.driver.spawn(context, instance, image_meta, [ 710.874892] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 710.874892] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] self._vmops.spawn(context, instance, image_meta, injected_files, [ 710.874892] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 710.874892] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] vm_ref = self.build_virtual_machine(instance, [ 710.874892] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 710.875316] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] vif_infos = vmwarevif.get_vif_info(self._session, [ 710.875316] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 710.875316] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] for vif in network_info: [ 710.875316] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 710.875316] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] return self._sync_wrapper(fn, *args, **kwargs) [ 710.875316] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 710.875316] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] self.wait() [ 710.875316] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 710.875316] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] self[:] = self._gt.wait() [ 710.875316] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 710.875316] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] return self._exit_event.wait() [ 710.875316] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 710.875316] env[61629]: ERROR 
nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] current.throw(*self._exc) [ 710.875701] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 710.875701] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] result = function(*args, **kwargs) [ 710.875701] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 710.875701] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] return func(*args, **kwargs) [ 710.875701] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 710.875701] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] raise e [ 710.875701] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 710.875701] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] nwinfo = self.network_api.allocate_for_instance( [ 710.875701] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 710.875701] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] created_port_ids = self._update_ports_for_instance( [ 710.875701] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 710.875701] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] with excutils.save_and_reraise_exception(): [ 710.875701] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 710.876098] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] self.force_reraise() [ 710.876098] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 710.876098] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] raise self.value [ 710.876098] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 710.876098] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] updated_port = self._update_port( [ 710.876098] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 710.876098] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] _ensure_no_port_binding_failure(port) [ 710.876098] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
710.876098] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] raise exception.PortBindingFailed(port_id=port['id']) [ 710.876098] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] nova.exception.PortBindingFailed: Binding failed for port 914c32fe-3e51-4b60-92d0-6d1abe056601, please check neutron logs for more information. [ 710.876098] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] [ 710.876098] env[61629]: INFO nova.compute.manager [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Terminating instance [ 710.877832] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Acquiring lock "refresh_cache-18b4e8c7-3517-46b2-b0a1-8d17bb222874" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.089674] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 711.131459] env[61629]: DEBUG nova.network.neutron [req-96b70aed-876d-4daf-8835-086d8862a2bb req-c7de0d2d-587c-47c2-a0b4-200b0e89833f service nova] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.195452] env[61629]: DEBUG nova.network.neutron [req-96b70aed-876d-4daf-8835-086d8862a2bb req-c7de0d2d-587c-47c2-a0b4-200b0e89833f service nova] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.251600] env[61629]: DEBUG oslo_concurrency.lockutils [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.452s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.251966] env[61629]: DEBUG nova.compute.manager [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 711.254978] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 29.704s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.700253] env[61629]: DEBUG oslo_concurrency.lockutils [req-96b70aed-876d-4daf-8835-086d8862a2bb req-c7de0d2d-587c-47c2-a0b4-200b0e89833f service nova] Releasing lock "refresh_cache-18b4e8c7-3517-46b2-b0a1-8d17bb222874" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 711.700800] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Acquired lock "refresh_cache-18b4e8c7-3517-46b2-b0a1-8d17bb222874" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.700800] env[61629]: DEBUG nova.network.neutron [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 711.762465] env[61629]: DEBUG nova.compute.utils [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 711.764564] env[61629]: DEBUG nova.compute.manager [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 711.764735] env[61629]: DEBUG nova.network.neutron [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 711.838842] env[61629]: DEBUG nova.policy [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be81178f7a914988a54581c283e2e76a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6d1f876ee054beb89ca0eb0776ddcd5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 712.131176] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc22a15-8c9a-4ba2-99fc-1d7a495b66ba {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.138026] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a79d07a8-dea6-48e2-b91b-d8cd4661e365 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.141921] env[61629]: DEBUG nova.network.neutron [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Successfully created port: 1edcb2db-1404-4ebb-a079-e7a025dc6acf {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 712.175169] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0485018b-b198-4587-b502-f2d3780e527e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.183205] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5cdfc53-d6b6-4021-ba9f-330c5774be5d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.196706] env[61629]: DEBUG nova.compute.provider_tree [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.227421] env[61629]: DEBUG nova.network.neutron [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 712.267953] env[61629]: DEBUG nova.compute.manager [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 712.303904] env[61629]: DEBUG nova.network.neutron [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.662042] env[61629]: DEBUG nova.compute.manager [req-88258d97-ca50-410f-b353-b488acb5c407 req-8e4b5c20-48a9-41af-82de-198f0546ea3d service nova] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Received event network-vif-deleted-914c32fe-3e51-4b60-92d0-6d1abe056601 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 712.699838] env[61629]: DEBUG nova.scheduler.client.report [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 712.807022] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Releasing lock "refresh_cache-18b4e8c7-3517-46b2-b0a1-8d17bb222874" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.808973] env[61629]: DEBUG nova.compute.manager [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 712.808973] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 712.809084] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4cf5ad97-cc21-4253-8d9d-2f71f0b3bd07 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.818504] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e95f8f-6a35-4323-8bec-a668736deedf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.840745] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 18b4e8c7-3517-46b2-b0a1-8d17bb222874 could not be found. [ 712.841350] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 712.841350] env[61629]: INFO nova.compute.manager [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Took 0.03 seconds to destroy the instance on the hypervisor. [ 712.841466] env[61629]: DEBUG oslo.service.loopingcall [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 712.841573] env[61629]: DEBUG nova.compute.manager [-] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 712.841667] env[61629]: DEBUG nova.network.neutron [-] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 712.860333] env[61629]: DEBUG nova.network.neutron [-] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 713.115797] env[61629]: ERROR nova.compute.manager [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1edcb2db-1404-4ebb-a079-e7a025dc6acf, please check neutron logs for more information. 
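The "Waiting for function ... _deallocate_network_with_retries to return" record above comes from oslo.service's looping-call machinery, which drives the network-deallocation retries. A generic FixedIntervalLoopingCall example (not Nova's exact retry wrapper; the function below is made up for illustration and assumes oslo.service is installed):

    from oslo_service import loopingcall

    attempts = {'n': 0}

    def _deallocate_with_retries():
        attempts['n'] += 1
        if attempts['n'] < 3:
            return  # not done yet; called again after `interval` seconds
        # Raising LoopingCallDone stops the loop; wait() returns retvalue.
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    result = timer.start(interval=1).wait()   # blocks until LoopingCallDone
    # result == True after the third attempt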
[ 713.115797] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 713.115797] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 713.115797] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 713.115797] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 713.115797] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 713.115797] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 713.115797] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 713.115797] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 713.115797] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 713.115797] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 713.115797] env[61629]: ERROR nova.compute.manager raise self.value [ 713.115797] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 713.115797] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 713.115797] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 713.115797] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 713.116270] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 713.116270] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 713.116270] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1edcb2db-1404-4ebb-a079-e7a025dc6acf, please check neutron logs for more information. 
[ 713.116270] env[61629]: ERROR nova.compute.manager [ 713.116270] env[61629]: Traceback (most recent call last): [ 713.116270] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 713.116270] env[61629]: listener.cb(fileno) [ 713.116270] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 713.116270] env[61629]: result = function(*args, **kwargs) [ 713.116270] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 713.116270] env[61629]: return func(*args, **kwargs) [ 713.116270] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 713.116270] env[61629]: raise e [ 713.116270] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 713.116270] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 713.116270] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 713.116270] env[61629]: created_port_ids = self._update_ports_for_instance( [ 713.116270] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 713.116270] env[61629]: with excutils.save_and_reraise_exception(): [ 713.116270] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 713.116270] env[61629]: self.force_reraise() [ 713.116270] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 713.116270] env[61629]: raise self.value [ 713.116270] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 713.116270] env[61629]: updated_port = self._update_port( [ 713.116270] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 713.116270] env[61629]: _ensure_no_port_binding_failure(port) [ 713.116270] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 713.116270] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 713.117247] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 1edcb2db-1404-4ebb-a079-e7a025dc6acf, please check neutron logs for more information. 
[ 713.117247] env[61629]: Removing descriptor: 21 [ 713.163120] env[61629]: DEBUG oslo_concurrency.lockutils [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Acquiring lock "b8cfaef2-5f78-4026-90b8-fe2adacd61e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.163324] env[61629]: DEBUG oslo_concurrency.lockutils [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Lock "b8cfaef2-5f78-4026-90b8-fe2adacd61e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.204488] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.949s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.205015] env[61629]: ERROR nova.compute.manager [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b80aeac8-416d-4020-9230-566a651c290a, please check neutron logs for more information. 
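The Acquiring / acquired / "released" lock records throughout this section ("compute_resources", "refresh_cache-<uuid>", the per-instance build locks) are emitted by oslo.concurrency's lockutils wrappers, which also report the waited/held durations seen above. A minimal usage sketch (lock name and functions are illustrative, not Nova's code; assumes oslo.concurrency is installed):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Only one thread in this process runs here at a time; the wrapper
        # logs the "acquired ... waited Ns" / "released ... held Ns" lines.
        return 'claimed %s' % instance_uuid

    # Equivalent context-manager form:
    def abort_claim(instance_uuid):
        with lockutils.lock('compute_resources'):
            return 'aborted %s' % instance_uuid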
[ 713.205015] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Traceback (most recent call last): [ 713.205015] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 713.205015] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] self.driver.spawn(context, instance, image_meta, [ 713.205015] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 713.205015] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 713.205015] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 713.205015] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] vm_ref = self.build_virtual_machine(instance, [ 713.205015] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 713.205015] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] vif_infos = vmwarevif.get_vif_info(self._session, [ 713.205015] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 713.205384] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] for vif in network_info: [ 713.205384] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 713.205384] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] return self._sync_wrapper(fn, *args, **kwargs) [ 713.205384] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 713.205384] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] self.wait() [ 713.205384] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 713.205384] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] self[:] = self._gt.wait() [ 713.205384] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 713.205384] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] return self._exit_event.wait() [ 713.205384] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 713.205384] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] current.throw(*self._exc) [ 713.205384] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
713.205384] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] result = function(*args, **kwargs) [ 713.205829] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 713.205829] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] return func(*args, **kwargs) [ 713.205829] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 713.205829] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] raise e [ 713.205829] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 713.205829] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] nwinfo = self.network_api.allocate_for_instance( [ 713.205829] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 713.205829] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] created_port_ids = self._update_ports_for_instance( [ 713.205829] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 713.205829] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] with excutils.save_and_reraise_exception(): [ 713.205829] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 713.205829] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] self.force_reraise() [ 713.205829] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 713.206251] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] raise self.value [ 713.206251] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 713.206251] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] updated_port = self._update_port( [ 713.206251] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 713.206251] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] _ensure_no_port_binding_failure(port) [ 713.206251] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 713.206251] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] raise exception.PortBindingFailed(port_id=port['id']) [ 713.206251] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] nova.exception.PortBindingFailed: Binding failed for 
port b80aeac8-416d-4020-9230-566a651c290a, please check neutron logs for more information. [ 713.206251] env[61629]: ERROR nova.compute.manager [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] [ 713.206251] env[61629]: DEBUG nova.compute.utils [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Binding failed for port b80aeac8-416d-4020-9230-566a651c290a, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 713.207937] env[61629]: DEBUG oslo_concurrency.lockutils [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.268s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.208631] env[61629]: INFO nova.compute.claims [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 713.211415] env[61629]: DEBUG nova.compute.manager [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Build of instance c332c6fd-1edd-4d9e-85a9-32a408f9d05e was re-scheduled: Binding failed for port b80aeac8-416d-4020-9230-566a651c290a, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 713.211789] env[61629]: DEBUG nova.compute.manager [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 713.212017] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Acquiring lock "refresh_cache-c332c6fd-1edd-4d9e-85a9-32a408f9d05e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 713.212169] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Acquired lock "refresh_cache-c332c6fd-1edd-4d9e-85a9-32a408f9d05e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.212323] env[61629]: DEBUG nova.network.neutron [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 713.279138] env[61629]: DEBUG nova.compute.manager [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 713.303273] env[61629]: DEBUG nova.virt.hardware [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 713.303533] env[61629]: DEBUG nova.virt.hardware [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 713.303736] env[61629]: DEBUG nova.virt.hardware [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 713.303923] env[61629]: DEBUG nova.virt.hardware [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 713.304082] env[61629]: DEBUG nova.virt.hardware [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 713.304228] env[61629]: DEBUG nova.virt.hardware [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 713.304429] env[61629]: DEBUG nova.virt.hardware [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 713.304585] env[61629]: DEBUG nova.virt.hardware [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 713.304750] 
env[61629]: DEBUG nova.virt.hardware [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 713.304909] env[61629]: DEBUG nova.virt.hardware [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 713.305090] env[61629]: DEBUG nova.virt.hardware [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 713.305933] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e14b084-c29e-41e6-9518-e2feca3df40a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.314260] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32c25f7-cbd3-466a-b9fe-e7c639eb1cdc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.327797] env[61629]: ERROR nova.compute.manager [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1edcb2db-1404-4ebb-a079-e7a025dc6acf, please check neutron logs for more information. 
[ 713.327797] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Traceback (most recent call last): [ 713.327797] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 713.327797] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] yield resources [ 713.327797] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 713.327797] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] self.driver.spawn(context, instance, image_meta, [ 713.327797] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 713.327797] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] self._vmops.spawn(context, instance, image_meta, injected_files, [ 713.327797] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 713.327797] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] vm_ref = self.build_virtual_machine(instance, [ 713.327797] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 713.328227] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] vif_infos = vmwarevif.get_vif_info(self._session, [ 713.328227] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 713.328227] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] for vif in network_info: [ 713.328227] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 713.328227] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] return self._sync_wrapper(fn, *args, **kwargs) [ 713.328227] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 713.328227] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] self.wait() [ 713.328227] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 713.328227] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] self[:] = self._gt.wait() [ 713.328227] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 713.328227] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] return self._exit_event.wait() [ 713.328227] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 713.328227] env[61629]: ERROR 
nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] current.throw(*self._exc) [ 713.328785] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 713.328785] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] result = function(*args, **kwargs) [ 713.328785] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 713.328785] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] return func(*args, **kwargs) [ 713.328785] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 713.328785] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] raise e [ 713.328785] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 713.328785] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] nwinfo = self.network_api.allocate_for_instance( [ 713.328785] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 713.328785] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] created_port_ids = self._update_ports_for_instance( [ 713.328785] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 713.328785] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] with excutils.save_and_reraise_exception(): [ 713.328785] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 713.329378] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] self.force_reraise() [ 713.329378] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 713.329378] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] raise self.value [ 713.329378] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 713.329378] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] updated_port = self._update_port( [ 713.329378] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 713.329378] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] _ensure_no_port_binding_failure(port) [ 713.329378] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
713.329378] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] raise exception.PortBindingFailed(port_id=port['id']) [ 713.329378] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] nova.exception.PortBindingFailed: Binding failed for port 1edcb2db-1404-4ebb-a079-e7a025dc6acf, please check neutron logs for more information. [ 713.329378] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] [ 713.329378] env[61629]: INFO nova.compute.manager [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Terminating instance [ 713.330274] env[61629]: DEBUG oslo_concurrency.lockutils [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "refresh_cache-54e03464-0f37-4f4d-8746-821e73da0541" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 713.330438] env[61629]: DEBUG oslo_concurrency.lockutils [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquired lock "refresh_cache-54e03464-0f37-4f4d-8746-821e73da0541" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.330630] env[61629]: DEBUG nova.network.neutron [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 713.363451] env[61629]: DEBUG nova.network.neutron [-] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.730104] env[61629]: DEBUG nova.network.neutron [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 713.839802] env[61629]: DEBUG nova.network.neutron [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.853987] env[61629]: DEBUG nova.network.neutron [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 713.865681] env[61629]: INFO nova.compute.manager [-] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Took 1.02 seconds to deallocate network for instance. 
[ 713.868050] env[61629]: DEBUG nova.compute.claims [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 713.868050] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.937494] env[61629]: DEBUG nova.network.neutron [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.344322] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Releasing lock "refresh_cache-c332c6fd-1edd-4d9e-85a9-32a408f9d05e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.344550] env[61629]: DEBUG nova.compute.manager [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 714.344724] env[61629]: DEBUG nova.compute.manager [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 714.344894] env[61629]: DEBUG nova.network.neutron [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 714.363714] env[61629]: DEBUG nova.network.neutron [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 714.442238] env[61629]: DEBUG oslo_concurrency.lockutils [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Releasing lock "refresh_cache-54e03464-0f37-4f4d-8746-821e73da0541" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.442703] env[61629]: DEBUG nova.compute.manager [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 714.442905] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 714.443764] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e25e60ef-3138-458e-84eb-44c6a9272cdc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.452445] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d37c057-b2c9-4747-b1e4-c30373a1e71d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.474902] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 54e03464-0f37-4f4d-8746-821e73da0541 could not be found. [ 714.475109] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 714.475289] env[61629]: INFO nova.compute.manager [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Took 0.03 seconds to destroy the instance on the hypervisor. [ 714.475523] env[61629]: DEBUG oslo.service.loopingcall [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 714.477636] env[61629]: DEBUG nova.compute.manager [-] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 714.477740] env[61629]: DEBUG nova.network.neutron [-] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 714.493648] env[61629]: DEBUG nova.network.neutron [-] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 714.540424] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a08f75-36bd-450c-9f6b-0d17fe789dad {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.550696] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc03f9a4-72d3-4a17-81b1-a07ba79f44bc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.580315] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e6f178-75c6-4dab-ae65-06913e95ceb3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.587072] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-137180ea-edda-4b65-b749-39d8963310fe {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.600071] env[61629]: DEBUG nova.compute.provider_tree [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.710435] env[61629]: DEBUG nova.compute.manager [req-9192e6fe-4a04-49e7-b572-a711f431e1f6 req-4ff72c0e-9ce8-4164-ad92-32a9b897026f service nova] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Received event network-changed-1edcb2db-1404-4ebb-a079-e7a025dc6acf {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 714.710809] env[61629]: DEBUG nova.compute.manager [req-9192e6fe-4a04-49e7-b572-a711f431e1f6 req-4ff72c0e-9ce8-4164-ad92-32a9b897026f service nova] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Refreshing instance network info cache due to event network-changed-1edcb2db-1404-4ebb-a079-e7a025dc6acf. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 714.711072] env[61629]: DEBUG oslo_concurrency.lockutils [req-9192e6fe-4a04-49e7-b572-a711f431e1f6 req-4ff72c0e-9ce8-4164-ad92-32a9b897026f service nova] Acquiring lock "refresh_cache-54e03464-0f37-4f4d-8746-821e73da0541" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 714.711259] env[61629]: DEBUG oslo_concurrency.lockutils [req-9192e6fe-4a04-49e7-b572-a711f431e1f6 req-4ff72c0e-9ce8-4164-ad92-32a9b897026f service nova] Acquired lock "refresh_cache-54e03464-0f37-4f4d-8746-821e73da0541" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.711379] env[61629]: DEBUG nova.network.neutron [req-9192e6fe-4a04-49e7-b572-a711f431e1f6 req-4ff72c0e-9ce8-4164-ad92-32a9b897026f service nova] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Refreshing network info cache for port 1edcb2db-1404-4ebb-a079-e7a025dc6acf {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 714.872167] env[61629]: DEBUG nova.network.neutron [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.996134] env[61629]: DEBUG nova.network.neutron [-] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.102541] env[61629]: DEBUG nova.scheduler.client.report [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 715.228798] env[61629]: DEBUG nova.network.neutron [req-9192e6fe-4a04-49e7-b572-a711f431e1f6 req-4ff72c0e-9ce8-4164-ad92-32a9b897026f service nova] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 715.308852] env[61629]: DEBUG nova.network.neutron [req-9192e6fe-4a04-49e7-b572-a711f431e1f6 req-4ff72c0e-9ce8-4164-ad92-32a9b897026f service nova] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.375028] env[61629]: INFO nova.compute.manager [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] [instance: c332c6fd-1edd-4d9e-85a9-32a408f9d05e] Took 1.03 seconds to deallocate network for instance. 
[ 715.499378] env[61629]: INFO nova.compute.manager [-] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Took 1.02 seconds to deallocate network for instance. [ 715.501947] env[61629]: DEBUG nova.compute.claims [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 715.502192] env[61629]: DEBUG oslo_concurrency.lockutils [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 715.608046] env[61629]: DEBUG oslo_concurrency.lockutils [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.401s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.608675] env[61629]: DEBUG nova.compute.manager [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 715.611431] env[61629]: DEBUG oslo_concurrency.lockutils [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.634s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.613421] env[61629]: INFO nova.compute.claims [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 715.812486] env[61629]: DEBUG oslo_concurrency.lockutils [req-9192e6fe-4a04-49e7-b572-a711f431e1f6 req-4ff72c0e-9ce8-4164-ad92-32a9b897026f service nova] Releasing lock "refresh_cache-54e03464-0f37-4f4d-8746-821e73da0541" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 715.812486] env[61629]: DEBUG nova.compute.manager [req-9192e6fe-4a04-49e7-b572-a711f431e1f6 req-4ff72c0e-9ce8-4164-ad92-32a9b897026f service nova] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Received event network-vif-deleted-1edcb2db-1404-4ebb-a079-e7a025dc6acf {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 716.118127] env[61629]: DEBUG nova.compute.utils [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 716.122508] env[61629]: DEBUG nova.compute.manager [None 
req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 716.122508] env[61629]: DEBUG nova.network.neutron [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 716.168018] env[61629]: DEBUG nova.policy [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2bdcaacaf2034ff994ee2e8b0e5071b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cc5fe81fb0eb4820825cc8e97b8fe4f2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 716.416406] env[61629]: INFO nova.scheduler.client.report [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Deleted allocations for instance c332c6fd-1edd-4d9e-85a9-32a408f9d05e [ 716.501674] env[61629]: DEBUG nova.network.neutron [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Successfully created port: 9882889a-9537-4827-a883-dcc6746b2eaf {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 716.623936] env[61629]: DEBUG nova.compute.manager [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 716.926535] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b777c25f-626f-4188-b592-fc2fef3dc104 tempest-ServersTestJSON-1510503269 tempest-ServersTestJSON-1510503269-project-member] Lock "c332c6fd-1edd-4d9e-85a9-32a408f9d05e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 141.105s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.999731] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7fdcb27-38d8-49e7-a66b-ca9ea741243e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.007603] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d9edc9-f458-458c-b907-5d4c9fecebde {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.056351] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1948c0-a55b-42b8-8ab2-176620b0d5ba {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.067201] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668d09b9-0607-477a-9b15-8e9751b3d6e3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.089435] env[61629]: DEBUG nova.compute.provider_tree [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 717.440587] env[61629]: DEBUG nova.compute.manager [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 717.509229] env[61629]: DEBUG nova.compute.manager [req-73f1ca86-86e8-481f-8804-5472a6bdcc94 req-500fece3-5b81-453b-873c-36d6d7334f28 service nova] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Received event network-changed-9882889a-9537-4827-a883-dcc6746b2eaf {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 717.509427] env[61629]: DEBUG nova.compute.manager [req-73f1ca86-86e8-481f-8804-5472a6bdcc94 req-500fece3-5b81-453b-873c-36d6d7334f28 service nova] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Refreshing instance network info cache due to event network-changed-9882889a-9537-4827-a883-dcc6746b2eaf. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 717.509641] env[61629]: DEBUG oslo_concurrency.lockutils [req-73f1ca86-86e8-481f-8804-5472a6bdcc94 req-500fece3-5b81-453b-873c-36d6d7334f28 service nova] Acquiring lock "refresh_cache-71a5a130-fd26-4cf5-9b27-520f9eb62c55" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 717.509784] env[61629]: DEBUG oslo_concurrency.lockutils [req-73f1ca86-86e8-481f-8804-5472a6bdcc94 req-500fece3-5b81-453b-873c-36d6d7334f28 service nova] Acquired lock "refresh_cache-71a5a130-fd26-4cf5-9b27-520f9eb62c55" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.509942] env[61629]: DEBUG nova.network.neutron [req-73f1ca86-86e8-481f-8804-5472a6bdcc94 req-500fece3-5b81-453b-873c-36d6d7334f28 service nova] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Refreshing network info cache for port 9882889a-9537-4827-a883-dcc6746b2eaf {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 717.594263] env[61629]: DEBUG nova.scheduler.client.report [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 717.609331] env[61629]: ERROR nova.compute.manager [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9882889a-9537-4827-a883-dcc6746b2eaf, please check neutron logs for more information. 
[ 717.609331] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 717.609331] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 717.609331] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 717.609331] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 717.609331] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 717.609331] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 717.609331] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 717.609331] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 717.609331] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 717.609331] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 717.609331] env[61629]: ERROR nova.compute.manager raise self.value [ 717.609331] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 717.609331] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 717.609331] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 717.609331] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 717.609807] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 717.609807] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 717.609807] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9882889a-9537-4827-a883-dcc6746b2eaf, please check neutron logs for more information. 
[ 717.609807] env[61629]: ERROR nova.compute.manager [ 717.609807] env[61629]: Traceback (most recent call last): [ 717.609807] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 717.609807] env[61629]: listener.cb(fileno) [ 717.609807] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 717.609807] env[61629]: result = function(*args, **kwargs) [ 717.609807] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 717.609807] env[61629]: return func(*args, **kwargs) [ 717.609807] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 717.609807] env[61629]: raise e [ 717.609807] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 717.609807] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 717.609807] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 717.609807] env[61629]: created_port_ids = self._update_ports_for_instance( [ 717.609807] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 717.609807] env[61629]: with excutils.save_and_reraise_exception(): [ 717.609807] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 717.609807] env[61629]: self.force_reraise() [ 717.609807] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 717.609807] env[61629]: raise self.value [ 717.609807] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 717.609807] env[61629]: updated_port = self._update_port( [ 717.609807] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 717.609807] env[61629]: _ensure_no_port_binding_failure(port) [ 717.609807] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 717.609807] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 717.610858] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 9882889a-9537-4827-a883-dcc6746b2eaf, please check neutron logs for more information. [ 717.610858] env[61629]: Removing descriptor: 15 [ 717.638120] env[61629]: DEBUG nova.compute.manager [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 717.664812] env[61629]: DEBUG nova.virt.hardware [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 717.665086] env[61629]: DEBUG nova.virt.hardware [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 717.665257] env[61629]: DEBUG nova.virt.hardware [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 717.665437] env[61629]: DEBUG nova.virt.hardware [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 717.665582] env[61629]: DEBUG nova.virt.hardware [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 717.665727] env[61629]: DEBUG nova.virt.hardware [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 717.665933] env[61629]: DEBUG nova.virt.hardware [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 717.666107] env[61629]: DEBUG nova.virt.hardware [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 717.666278] env[61629]: DEBUG nova.virt.hardware [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 717.666441] env[61629]: DEBUG nova.virt.hardware [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 717.666611] env[61629]: DEBUG nova.virt.hardware [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 717.667504] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba09211-0171-40e9-8cc1-f611e233bc2c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.675322] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db628fe-d6bd-4b8c-8d3e-961cbb120ff7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.689265] env[61629]: ERROR nova.compute.manager [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9882889a-9537-4827-a883-dcc6746b2eaf, please check neutron logs for more information. 
[ 717.689265] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Traceback (most recent call last): [ 717.689265] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 717.689265] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] yield resources [ 717.689265] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 717.689265] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] self.driver.spawn(context, instance, image_meta, [ 717.689265] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 717.689265] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] self._vmops.spawn(context, instance, image_meta, injected_files, [ 717.689265] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 717.689265] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] vm_ref = self.build_virtual_machine(instance, [ 717.689265] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 717.689577] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] vif_infos = vmwarevif.get_vif_info(self._session, [ 717.689577] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 717.689577] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] for vif in network_info: [ 717.689577] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 717.689577] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] return self._sync_wrapper(fn, *args, **kwargs) [ 717.689577] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 717.689577] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] self.wait() [ 717.689577] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 717.689577] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] self[:] = self._gt.wait() [ 717.689577] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 717.689577] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] return self._exit_event.wait() [ 717.689577] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 717.689577] env[61629]: ERROR 
nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] current.throw(*self._exc) [ 717.689905] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 717.689905] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] result = function(*args, **kwargs) [ 717.689905] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 717.689905] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] return func(*args, **kwargs) [ 717.689905] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 717.689905] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] raise e [ 717.689905] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 717.689905] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] nwinfo = self.network_api.allocate_for_instance( [ 717.689905] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 717.689905] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] created_port_ids = self._update_ports_for_instance( [ 717.689905] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 717.689905] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] with excutils.save_and_reraise_exception(): [ 717.689905] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 717.690282] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] self.force_reraise() [ 717.690282] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 717.690282] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] raise self.value [ 717.690282] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 717.690282] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] updated_port = self._update_port( [ 717.690282] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 717.690282] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] _ensure_no_port_binding_failure(port) [ 717.690282] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
717.690282] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] raise exception.PortBindingFailed(port_id=port['id']) [ 717.690282] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] nova.exception.PortBindingFailed: Binding failed for port 9882889a-9537-4827-a883-dcc6746b2eaf, please check neutron logs for more information. [ 717.690282] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] [ 717.690282] env[61629]: INFO nova.compute.manager [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Terminating instance [ 717.691545] env[61629]: DEBUG oslo_concurrency.lockutils [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquiring lock "refresh_cache-71a5a130-fd26-4cf5-9b27-520f9eb62c55" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 717.979989] env[61629]: DEBUG oslo_concurrency.lockutils [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.039997] env[61629]: DEBUG nova.network.neutron [req-73f1ca86-86e8-481f-8804-5472a6bdcc94 req-500fece3-5b81-453b-873c-36d6d7334f28 service nova] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 718.103473] env[61629]: DEBUG oslo_concurrency.lockutils [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.491s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.103473] env[61629]: DEBUG nova.compute.manager [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 718.107208] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.756s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 718.108734] env[61629]: INFO nova.compute.claims [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 718.211461] env[61629]: DEBUG nova.network.neutron [req-73f1ca86-86e8-481f-8804-5472a6bdcc94 req-500fece3-5b81-453b-873c-36d6d7334f28 service nova] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.617946] env[61629]: DEBUG nova.compute.utils [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 718.618276] env[61629]: DEBUG nova.compute.manager [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 718.618276] env[61629]: DEBUG nova.network.neutron [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 718.714150] env[61629]: DEBUG oslo_concurrency.lockutils [req-73f1ca86-86e8-481f-8804-5472a6bdcc94 req-500fece3-5b81-453b-873c-36d6d7334f28 service nova] Releasing lock "refresh_cache-71a5a130-fd26-4cf5-9b27-520f9eb62c55" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.715856] env[61629]: DEBUG nova.policy [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c16c3a3f8ec4df195b6b16b356fde15', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e6f6ddab35554ac1a839c7fad10aace3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 718.720544] env[61629]: DEBUG oslo_concurrency.lockutils [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquired lock "refresh_cache-71a5a130-fd26-4cf5-9b27-520f9eb62c55" 
{{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.720749] env[61629]: DEBUG nova.network.neutron [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 719.123984] env[61629]: DEBUG nova.compute.manager [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 719.242620] env[61629]: DEBUG nova.network.neutron [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 719.333354] env[61629]: DEBUG nova.network.neutron [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.347476] env[61629]: DEBUG nova.network.neutron [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Successfully created port: fe934045-b15c-49e0-b824-cd21c688263e {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 719.545876] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ec0ac4-8dac-4120-a9db-ebbc696dd02a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.555793] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a75834a1-c7c0-4997-9b1f-7765cb403139 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.591793] env[61629]: DEBUG nova.compute.manager [req-5c59f5f0-6553-4468-936a-681876f9808c req-85fc3ecd-1289-4f8a-94fd-61f740221c88 service nova] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Received event network-vif-deleted-9882889a-9537-4827-a883-dcc6746b2eaf {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 719.592798] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f6ce98e-45dc-4eb0-9022-b8dfaac35831 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.601605] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af7df22f-9c0e-40bd-8d49-d1e9913fb6f4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.616056] env[61629]: DEBUG nova.compute.provider_tree [None 
req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 719.835847] env[61629]: DEBUG oslo_concurrency.lockutils [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Releasing lock "refresh_cache-71a5a130-fd26-4cf5-9b27-520f9eb62c55" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.836435] env[61629]: DEBUG nova.compute.manager [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 719.836510] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 719.836803] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d10247a2-4744-4e1b-af15-033ca9a46312 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.846026] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445ac9ff-9631-4f6f-bf9b-3f77b0f3aba5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.869044] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 71a5a130-fd26-4cf5-9b27-520f9eb62c55 could not be found. [ 719.869044] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 719.869044] env[61629]: INFO nova.compute.manager [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Took 0.03 seconds to destroy the instance on the hypervisor. [ 719.869044] env[61629]: DEBUG oslo.service.loopingcall [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 719.869044] env[61629]: DEBUG nova.compute.manager [-] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 719.869044] env[61629]: DEBUG nova.network.neutron [-] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 719.889173] env[61629]: DEBUG nova.network.neutron [-] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 720.119101] env[61629]: DEBUG nova.scheduler.client.report [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 720.133334] env[61629]: DEBUG nova.compute.manager [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 720.167870] env[61629]: DEBUG nova.virt.hardware [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:56:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='88fe75f5-1715-4c09-a189-4c22f8156e6a',id=37,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-988735152',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 720.168188] env[61629]: DEBUG nova.virt.hardware [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 720.168349] env[61629]: DEBUG nova.virt.hardware [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 720.168528] env[61629]: DEBUG nova.virt.hardware [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 720.168672] env[61629]: DEBUG nova.virt.hardware [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 720.168823] env[61629]: DEBUG nova.virt.hardware [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 720.169039] env[61629]: DEBUG nova.virt.hardware [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 720.169200] env[61629]: DEBUG nova.virt.hardware [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 720.169364] env[61629]: DEBUG 
nova.virt.hardware [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 720.169521] env[61629]: DEBUG nova.virt.hardware [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 720.169688] env[61629]: DEBUG nova.virt.hardware [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 720.170900] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251be527-3a88-4f07-bc58-d5f0005864c8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.179160] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2346936-29e1-4fa7-a96c-52128212af62 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.391608] env[61629]: DEBUG nova.network.neutron [-] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.542083] env[61629]: ERROR nova.compute.manager [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fe934045-b15c-49e0-b824-cd21c688263e, please check neutron logs for more information. 
[ 720.542083] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 720.542083] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 720.542083] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 720.542083] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 720.542083] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 720.542083] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 720.542083] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 720.542083] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 720.542083] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 720.542083] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 720.542083] env[61629]: ERROR nova.compute.manager raise self.value [ 720.542083] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 720.542083] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 720.542083] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 720.542083] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 720.542879] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 720.542879] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 720.542879] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fe934045-b15c-49e0-b824-cd21c688263e, please check neutron logs for more information. 
[ 720.542879] env[61629]: ERROR nova.compute.manager [ 720.542879] env[61629]: Traceback (most recent call last): [ 720.542879] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 720.542879] env[61629]: listener.cb(fileno) [ 720.542879] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 720.542879] env[61629]: result = function(*args, **kwargs) [ 720.542879] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 720.542879] env[61629]: return func(*args, **kwargs) [ 720.542879] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 720.542879] env[61629]: raise e [ 720.542879] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 720.542879] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 720.542879] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 720.542879] env[61629]: created_port_ids = self._update_ports_for_instance( [ 720.542879] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 720.542879] env[61629]: with excutils.save_and_reraise_exception(): [ 720.542879] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 720.542879] env[61629]: self.force_reraise() [ 720.542879] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 720.542879] env[61629]: raise self.value [ 720.542879] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 720.542879] env[61629]: updated_port = self._update_port( [ 720.542879] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 720.542879] env[61629]: _ensure_no_port_binding_failure(port) [ 720.542879] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 720.542879] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 720.545220] env[61629]: nova.exception.PortBindingFailed: Binding failed for port fe934045-b15c-49e0-b824-cd21c688263e, please check neutron logs for more information. [ 720.545220] env[61629]: Removing descriptor: 15 [ 720.545220] env[61629]: ERROR nova.compute.manager [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fe934045-b15c-49e0-b824-cd21c688263e, please check neutron logs for more information. 
[ 720.545220] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Traceback (most recent call last): [ 720.545220] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 720.545220] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] yield resources [ 720.545220] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 720.545220] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] self.driver.spawn(context, instance, image_meta, [ 720.545220] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 720.545220] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 720.545220] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 720.545220] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] vm_ref = self.build_virtual_machine(instance, [ 720.545569] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 720.545569] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] vif_infos = vmwarevif.get_vif_info(self._session, [ 720.545569] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 720.545569] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] for vif in network_info: [ 720.545569] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 720.545569] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] return self._sync_wrapper(fn, *args, **kwargs) [ 720.545569] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 720.545569] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] self.wait() [ 720.545569] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 720.545569] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] self[:] = self._gt.wait() [ 720.545569] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 720.545569] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] return self._exit_event.wait() [ 720.545569] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 720.545909] env[61629]: ERROR 
nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] result = hub.switch() [ 720.545909] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 720.545909] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] return self.greenlet.switch() [ 720.545909] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 720.545909] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] result = function(*args, **kwargs) [ 720.545909] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 720.545909] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] return func(*args, **kwargs) [ 720.545909] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 720.545909] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] raise e [ 720.545909] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 720.545909] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] nwinfo = self.network_api.allocate_for_instance( [ 720.545909] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 720.545909] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] created_port_ids = self._update_ports_for_instance( [ 720.546890] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 720.546890] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] with excutils.save_and_reraise_exception(): [ 720.546890] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 720.546890] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] self.force_reraise() [ 720.546890] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 720.546890] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] raise self.value [ 720.546890] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 720.546890] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] updated_port = self._update_port( [ 720.546890] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 720.546890] 
env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] _ensure_no_port_binding_failure(port) [ 720.546890] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 720.546890] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] raise exception.PortBindingFailed(port_id=port['id']) [ 720.547270] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] nova.exception.PortBindingFailed: Binding failed for port fe934045-b15c-49e0-b824-cd21c688263e, please check neutron logs for more information. [ 720.547270] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] [ 720.547270] env[61629]: INFO nova.compute.manager [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Terminating instance [ 720.547270] env[61629]: DEBUG oslo_concurrency.lockutils [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Acquiring lock "refresh_cache-6dd1097f-7353-4938-be2b-51c248e45fe2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 720.547270] env[61629]: DEBUG oslo_concurrency.lockutils [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Acquired lock "refresh_cache-6dd1097f-7353-4938-be2b-51c248e45fe2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.547406] env[61629]: DEBUG nova.network.neutron [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 720.625916] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.517s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 720.625916] env[61629]: DEBUG nova.compute.manager [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 720.628748] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.351s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.630705] env[61629]: INFO nova.compute.claims [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 720.897682] env[61629]: INFO nova.compute.manager [-] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Took 1.03 seconds to deallocate network for instance. [ 720.901216] env[61629]: DEBUG nova.compute.claims [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 720.901493] env[61629]: DEBUG oslo_concurrency.lockutils [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.069724] env[61629]: DEBUG nova.network.neutron [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 721.130209] env[61629]: DEBUG nova.compute.utils [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 721.132344] env[61629]: DEBUG nova.network.neutron [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.134521] env[61629]: DEBUG nova.compute.manager [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 721.134521] env[61629]: DEBUG nova.network.neutron [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 721.201500] env[61629]: DEBUG nova.policy [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6d6a49da995247cd82f5bbc8723e0bec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8cfdf0378fdb4608aad6441f4ec6395a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 721.551602] env[61629]: DEBUG nova.network.neutron [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Successfully created port: 61e08638-0935-43a7-9938-7d14435a76dd {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 721.595015] env[61629]: DEBUG nova.compute.manager [req-1ca4984a-c925-47d2-b7cd-945c5a922618 req-2c5149f3-59a5-4df9-b204-7418a419af53 service nova] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Received event network-changed-fe934045-b15c-49e0-b824-cd21c688263e {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 721.595331] env[61629]: DEBUG nova.compute.manager [req-1ca4984a-c925-47d2-b7cd-945c5a922618 req-2c5149f3-59a5-4df9-b204-7418a419af53 service nova] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Refreshing instance network info cache due to event network-changed-fe934045-b15c-49e0-b824-cd21c688263e. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 721.595583] env[61629]: DEBUG oslo_concurrency.lockutils [req-1ca4984a-c925-47d2-b7cd-945c5a922618 req-2c5149f3-59a5-4df9-b204-7418a419af53 service nova] Acquiring lock "refresh_cache-6dd1097f-7353-4938-be2b-51c248e45fe2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.638073] env[61629]: DEBUG oslo_concurrency.lockutils [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Releasing lock "refresh_cache-6dd1097f-7353-4938-be2b-51c248e45fe2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 721.638698] env[61629]: DEBUG nova.compute.manager [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 721.638930] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 721.639496] env[61629]: DEBUG nova.compute.manager [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 721.647021] env[61629]: DEBUG oslo_concurrency.lockutils [req-1ca4984a-c925-47d2-b7cd-945c5a922618 req-2c5149f3-59a5-4df9-b204-7418a419af53 service nova] Acquired lock "refresh_cache-6dd1097f-7353-4938-be2b-51c248e45fe2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.647021] env[61629]: DEBUG nova.network.neutron [req-1ca4984a-c925-47d2-b7cd-945c5a922618 req-2c5149f3-59a5-4df9-b204-7418a419af53 service nova] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Refreshing network info cache for port fe934045-b15c-49e0-b824-cd21c688263e {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 721.647021] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f53ecfec-33b7-4371-8ec3-fa234b9f27a7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.668573] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac5853fd-57d6-49ae-9293-de08164d02f7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.699234] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6dd1097f-7353-4938-be2b-51c248e45fe2 could not be found. [ 721.699234] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 721.699234] env[61629]: INFO nova.compute.manager [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Took 0.06 seconds to destroy the instance on the hypervisor. [ 721.699234] env[61629]: DEBUG oslo.service.loopingcall [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 721.699234] env[61629]: DEBUG nova.compute.manager [-] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 721.699234] env[61629]: DEBUG nova.network.neutron [-] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 721.725103] env[61629]: DEBUG nova.network.neutron [-] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.086013] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e49e0a-0be7-41e2-a369-777f11790ede {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.093645] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-921dc782-d9ce-4a15-8ded-1d0e901cc8b8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.122884] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882bec43-cc7a-4e40-b9ad-0de6de8ae44e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.130530] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c0271e-a89b-4a79-ba58-b35dbf3d5928 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.143887] env[61629]: DEBUG nova.compute.provider_tree [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 722.175038] env[61629]: DEBUG nova.network.neutron [req-1ca4984a-c925-47d2-b7cd-945c5a922618 req-2c5149f3-59a5-4df9-b204-7418a419af53 service nova] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.227150] env[61629]: DEBUG nova.network.neutron [-] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.262620] env[61629]: DEBUG nova.network.neutron [req-1ca4984a-c925-47d2-b7cd-945c5a922618 req-2c5149f3-59a5-4df9-b204-7418a419af53 service nova] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.626436] env[61629]: ERROR nova.compute.manager [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 61e08638-0935-43a7-9938-7d14435a76dd, please check neutron logs for more information. [ 722.626436] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 722.626436] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 722.626436] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 722.626436] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 722.626436] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 722.626436] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 722.626436] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 722.626436] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 722.626436] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 722.626436] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 722.626436] env[61629]: ERROR nova.compute.manager raise self.value [ 722.626436] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 722.626436] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 722.626436] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 722.626436] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 722.626889] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 722.626889] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 722.626889] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 61e08638-0935-43a7-9938-7d14435a76dd, please check neutron logs for more information. 
[ 722.626889] env[61629]: ERROR nova.compute.manager [ 722.626889] env[61629]: Traceback (most recent call last): [ 722.626889] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 722.626889] env[61629]: listener.cb(fileno) [ 722.626889] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 722.626889] env[61629]: result = function(*args, **kwargs) [ 722.626889] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 722.626889] env[61629]: return func(*args, **kwargs) [ 722.626889] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 722.626889] env[61629]: raise e [ 722.626889] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 722.626889] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 722.626889] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 722.626889] env[61629]: created_port_ids = self._update_ports_for_instance( [ 722.626889] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 722.626889] env[61629]: with excutils.save_and_reraise_exception(): [ 722.626889] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 722.626889] env[61629]: self.force_reraise() [ 722.626889] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 722.626889] env[61629]: raise self.value [ 722.626889] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 722.626889] env[61629]: updated_port = self._update_port( [ 722.626889] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 722.626889] env[61629]: _ensure_no_port_binding_failure(port) [ 722.626889] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 722.626889] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 722.628380] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 61e08638-0935-43a7-9938-7d14435a76dd, please check neutron logs for more information. [ 722.628380] env[61629]: Removing descriptor: 15 [ 722.646950] env[61629]: DEBUG nova.scheduler.client.report [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 722.661725] env[61629]: DEBUG nova.compute.manager [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 722.690542] env[61629]: DEBUG nova.virt.hardware [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 722.690785] env[61629]: DEBUG nova.virt.hardware [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 722.690938] env[61629]: DEBUG nova.virt.hardware [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 722.691127] env[61629]: DEBUG nova.virt.hardware [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 722.691271] env[61629]: DEBUG nova.virt.hardware [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 722.691414] env[61629]: DEBUG nova.virt.hardware [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 722.691611] env[61629]: DEBUG nova.virt.hardware [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 722.691765] env[61629]: DEBUG nova.virt.hardware [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 722.691927] env[61629]: DEBUG 
nova.virt.hardware [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 722.692151] env[61629]: DEBUG nova.virt.hardware [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 722.692453] env[61629]: DEBUG nova.virt.hardware [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 722.693358] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab7f7dc9-ded1-4c74-969d-a5a691d6a947 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.701206] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b82c95-518b-4aae-bf86-2ca3e70fb586 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.715699] env[61629]: ERROR nova.compute.manager [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 61e08638-0935-43a7-9938-7d14435a76dd, please check neutron logs for more information. 
[ 722.715699] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Traceback (most recent call last): [ 722.715699] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 722.715699] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] yield resources [ 722.715699] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 722.715699] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] self.driver.spawn(context, instance, image_meta, [ 722.715699] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 722.715699] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] self._vmops.spawn(context, instance, image_meta, injected_files, [ 722.715699] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 722.715699] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] vm_ref = self.build_virtual_machine(instance, [ 722.715699] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 722.716040] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] vif_infos = vmwarevif.get_vif_info(self._session, [ 722.716040] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 722.716040] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] for vif in network_info: [ 722.716040] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 722.716040] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] return self._sync_wrapper(fn, *args, **kwargs) [ 722.716040] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 722.716040] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] self.wait() [ 722.716040] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 722.716040] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] self[:] = self._gt.wait() [ 722.716040] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 722.716040] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] return self._exit_event.wait() [ 722.716040] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 722.716040] env[61629]: ERROR 
nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] current.throw(*self._exc) [ 722.716408] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 722.716408] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] result = function(*args, **kwargs) [ 722.716408] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 722.716408] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] return func(*args, **kwargs) [ 722.716408] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 722.716408] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] raise e [ 722.716408] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 722.716408] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] nwinfo = self.network_api.allocate_for_instance( [ 722.716408] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 722.716408] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] created_port_ids = self._update_ports_for_instance( [ 722.716408] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 722.716408] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] with excutils.save_and_reraise_exception(): [ 722.716408] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 722.716770] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] self.force_reraise() [ 722.716770] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 722.716770] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] raise self.value [ 722.716770] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 722.716770] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] updated_port = self._update_port( [ 722.716770] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 722.716770] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] _ensure_no_port_binding_failure(port) [ 722.716770] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
722.716770] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] raise exception.PortBindingFailed(port_id=port['id']) [ 722.716770] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] nova.exception.PortBindingFailed: Binding failed for port 61e08638-0935-43a7-9938-7d14435a76dd, please check neutron logs for more information. [ 722.716770] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] [ 722.716770] env[61629]: INFO nova.compute.manager [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Terminating instance [ 722.718044] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Acquiring lock "refresh_cache-39f7c5ee-7d07-4516-b008-40d5778cf139" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 722.718208] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Acquired lock "refresh_cache-39f7c5ee-7d07-4516-b008-40d5778cf139" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.718370] env[61629]: DEBUG nova.network.neutron [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 722.729601] env[61629]: INFO nova.compute.manager [-] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Took 1.03 seconds to deallocate network for instance. 
[ 722.731214] env[61629]: DEBUG nova.compute.claims [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 722.731380] env[61629]: DEBUG oslo_concurrency.lockutils [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.766200] env[61629]: DEBUG oslo_concurrency.lockutils [req-1ca4984a-c925-47d2-b7cd-945c5a922618 req-2c5149f3-59a5-4df9-b204-7418a419af53 service nova] Releasing lock "refresh_cache-6dd1097f-7353-4938-be2b-51c248e45fe2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.766498] env[61629]: DEBUG nova.compute.manager [req-1ca4984a-c925-47d2-b7cd-945c5a922618 req-2c5149f3-59a5-4df9-b204-7418a419af53 service nova] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Received event network-vif-deleted-fe934045-b15c-49e0-b824-cd21c688263e {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 723.154123] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.525s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.156035] env[61629]: DEBUG nova.compute.manager [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 723.157896] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.460s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 723.157896] env[61629]: DEBUG nova.objects.instance [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lazy-loading 'resources' on Instance uuid dd406dd1-0e19-400b-a862-ae51fd134017 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 723.244929] env[61629]: DEBUG nova.network.neutron [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 723.330329] env[61629]: DEBUG nova.network.neutron [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.615829] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "76f08ac6-bb83-4d61-9707-b602028c54f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 723.615829] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "76f08ac6-bb83-4d61-9707-b602028c54f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 723.652147] env[61629]: DEBUG nova.compute.manager [req-c6c5c030-2bde-4603-b4dc-9b6f33adbb5f req-028953f3-ccee-41d3-b69b-8209f2438d03 service nova] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Received event network-changed-61e08638-0935-43a7-9938-7d14435a76dd {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 723.652147] env[61629]: DEBUG nova.compute.manager [req-c6c5c030-2bde-4603-b4dc-9b6f33adbb5f req-028953f3-ccee-41d3-b69b-8209f2438d03 service nova] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Refreshing instance network info cache due to event network-changed-61e08638-0935-43a7-9938-7d14435a76dd. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 723.652147] env[61629]: DEBUG oslo_concurrency.lockutils [req-c6c5c030-2bde-4603-b4dc-9b6f33adbb5f req-028953f3-ccee-41d3-b69b-8209f2438d03 service nova] Acquiring lock "refresh_cache-39f7c5ee-7d07-4516-b008-40d5778cf139" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.663602] env[61629]: DEBUG nova.compute.utils [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 723.665285] env[61629]: DEBUG nova.compute.manager [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 723.665441] env[61629]: DEBUG nova.network.neutron [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 723.714217] env[61629]: DEBUG nova.policy [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '03b7a88ad6264926ae54b1880e8c4a98', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3faf107bfcdd4d9fb07afc1b3f51e36c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 723.833535] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Releasing lock "refresh_cache-39f7c5ee-7d07-4516-b008-40d5778cf139" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 723.834094] env[61629]: DEBUG nova.compute.manager [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 723.834776] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 723.834776] env[61629]: DEBUG oslo_concurrency.lockutils [req-c6c5c030-2bde-4603-b4dc-9b6f33adbb5f req-028953f3-ccee-41d3-b69b-8209f2438d03 service nova] Acquired lock "refresh_cache-39f7c5ee-7d07-4516-b008-40d5778cf139" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.834909] env[61629]: DEBUG nova.network.neutron [req-c6c5c030-2bde-4603-b4dc-9b6f33adbb5f req-028953f3-ccee-41d3-b69b-8209f2438d03 service nova] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Refreshing network info cache for port 61e08638-0935-43a7-9938-7d14435a76dd {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 723.835991] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-afc98628-fe16-4d2e-a257-5e09ad8550a8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.852010] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5542d74-e0cc-4e11-af28-9de4465592f3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.884950] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 39f7c5ee-7d07-4516-b008-40d5778cf139 could not be found. [ 723.884950] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 723.884950] env[61629]: INFO nova.compute.manager [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Took 0.05 seconds to destroy the instance on the hypervisor. [ 723.884950] env[61629]: DEBUG oslo.service.loopingcall [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 723.887472] env[61629]: DEBUG nova.compute.manager [-] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 723.887631] env[61629]: DEBUG nova.network.neutron [-] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 723.916934] env[61629]: DEBUG nova.network.neutron [-] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 724.057842] env[61629]: DEBUG nova.network.neutron [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Successfully created port: b5180d58-fd4a-49e0-8719-ec41c4dd7ef5 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 724.082476] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e961da8c-9db4-4128-b696-b5d4591bc4e1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.090667] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a2abf3-83ac-446f-a1aa-774637120fa8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.121077] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ecb879-e6d4-43a9-8dd6-3f9b52eb8da2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.129059] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfdd54c7-bd27-4a70-a2e0-470286209376 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.142677] env[61629]: DEBUG nova.compute.provider_tree [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 724.169243] env[61629]: DEBUG nova.compute.manager [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 724.362793] env[61629]: DEBUG nova.network.neutron [req-c6c5c030-2bde-4603-b4dc-9b6f33adbb5f req-028953f3-ccee-41d3-b69b-8209f2438d03 service nova] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 724.421688] env[61629]: DEBUG nova.network.neutron [-] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.486660] env[61629]: DEBUG nova.network.neutron [req-c6c5c030-2bde-4603-b4dc-9b6f33adbb5f req-028953f3-ccee-41d3-b69b-8209f2438d03 service nova] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.647637] env[61629]: DEBUG nova.scheduler.client.report [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 724.924479] env[61629]: INFO nova.compute.manager [-] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Took 1.04 seconds to deallocate network for instance. [ 724.926998] env[61629]: DEBUG nova.compute.claims [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 724.927199] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.989730] env[61629]: DEBUG oslo_concurrency.lockutils [req-c6c5c030-2bde-4603-b4dc-9b6f33adbb5f req-028953f3-ccee-41d3-b69b-8209f2438d03 service nova] Releasing lock "refresh_cache-39f7c5ee-7d07-4516-b008-40d5778cf139" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.990012] env[61629]: DEBUG nova.compute.manager [req-c6c5c030-2bde-4603-b4dc-9b6f33adbb5f req-028953f3-ccee-41d3-b69b-8209f2438d03 service nova] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Received event network-vif-deleted-61e08638-0935-43a7-9938-7d14435a76dd {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 725.088436] env[61629]: ERROR nova.compute.manager [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b5180d58-fd4a-49e0-8719-ec41c4dd7ef5, please check neutron logs for more information. 
[ 725.088436] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 725.088436] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 725.088436] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 725.088436] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 725.088436] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 725.088436] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 725.088436] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 725.088436] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 725.088436] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 725.088436] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 725.088436] env[61629]: ERROR nova.compute.manager raise self.value [ 725.088436] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 725.088436] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 725.088436] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 725.088436] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 725.088930] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 725.088930] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 725.088930] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b5180d58-fd4a-49e0-8719-ec41c4dd7ef5, please check neutron logs for more information. 
[ 725.088930] env[61629]: ERROR nova.compute.manager [ 725.088930] env[61629]: Traceback (most recent call last): [ 725.088930] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 725.088930] env[61629]: listener.cb(fileno) [ 725.088930] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 725.088930] env[61629]: result = function(*args, **kwargs) [ 725.088930] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 725.088930] env[61629]: return func(*args, **kwargs) [ 725.088930] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 725.088930] env[61629]: raise e [ 725.088930] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 725.088930] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 725.088930] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 725.088930] env[61629]: created_port_ids = self._update_ports_for_instance( [ 725.088930] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 725.088930] env[61629]: with excutils.save_and_reraise_exception(): [ 725.088930] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 725.088930] env[61629]: self.force_reraise() [ 725.088930] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 725.088930] env[61629]: raise self.value [ 725.088930] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 725.088930] env[61629]: updated_port = self._update_port( [ 725.088930] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 725.088930] env[61629]: _ensure_no_port_binding_failure(port) [ 725.088930] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 725.088930] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 725.089752] env[61629]: nova.exception.PortBindingFailed: Binding failed for port b5180d58-fd4a-49e0-8719-ec41c4dd7ef5, please check neutron logs for more information. 
[ 725.089752] env[61629]: Removing descriptor: 15 [ 725.155628] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.995s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.155628] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.728s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.179953] env[61629]: DEBUG nova.compute.manager [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 725.188208] env[61629]: INFO nova.scheduler.client.report [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Deleted allocations for instance dd406dd1-0e19-400b-a862-ae51fd134017 [ 725.211808] env[61629]: DEBUG nova.virt.hardware [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 725.211978] env[61629]: DEBUG nova.virt.hardware [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 725.212128] env[61629]: DEBUG nova.virt.hardware [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 725.212312] env[61629]: DEBUG nova.virt.hardware [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 
725.212456] env[61629]: DEBUG nova.virt.hardware [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 725.212645] env[61629]: DEBUG nova.virt.hardware [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 725.212914] env[61629]: DEBUG nova.virt.hardware [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 725.212983] env[61629]: DEBUG nova.virt.hardware [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 725.213117] env[61629]: DEBUG nova.virt.hardware [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 725.213312] env[61629]: DEBUG nova.virt.hardware [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 725.213433] env[61629]: DEBUG nova.virt.hardware [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 725.214640] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f4a5c3-3996-4cd3-b859-d9bb1ec17d0a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.223486] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02820b0a-0be9-4e85-83e2-4772984daa06 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.237526] env[61629]: ERROR nova.compute.manager [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b5180d58-fd4a-49e0-8719-ec41c4dd7ef5, please check neutron logs for more information. 
[ 725.237526] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Traceback (most recent call last): [ 725.237526] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 725.237526] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] yield resources [ 725.237526] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 725.237526] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] self.driver.spawn(context, instance, image_meta, [ 725.237526] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 725.237526] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] self._vmops.spawn(context, instance, image_meta, injected_files, [ 725.237526] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 725.237526] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] vm_ref = self.build_virtual_machine(instance, [ 725.237526] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 725.237957] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] vif_infos = vmwarevif.get_vif_info(self._session, [ 725.237957] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 725.237957] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] for vif in network_info: [ 725.237957] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 725.237957] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] return self._sync_wrapper(fn, *args, **kwargs) [ 725.237957] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 725.237957] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] self.wait() [ 725.237957] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 725.237957] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] self[:] = self._gt.wait() [ 725.237957] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 725.237957] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] return self._exit_event.wait() [ 725.237957] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 725.237957] env[61629]: ERROR 
nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] current.throw(*self._exc) [ 725.238377] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 725.238377] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] result = function(*args, **kwargs) [ 725.238377] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 725.238377] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] return func(*args, **kwargs) [ 725.238377] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 725.238377] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] raise e [ 725.238377] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 725.238377] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] nwinfo = self.network_api.allocate_for_instance( [ 725.238377] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 725.238377] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] created_port_ids = self._update_ports_for_instance( [ 725.238377] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 725.238377] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] with excutils.save_and_reraise_exception(): [ 725.238377] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 725.238799] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] self.force_reraise() [ 725.238799] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 725.238799] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] raise self.value [ 725.238799] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 725.238799] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] updated_port = self._update_port( [ 725.238799] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 725.238799] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] _ensure_no_port_binding_failure(port) [ 725.238799] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
725.238799] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] raise exception.PortBindingFailed(port_id=port['id']) [ 725.238799] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] nova.exception.PortBindingFailed: Binding failed for port b5180d58-fd4a-49e0-8719-ec41c4dd7ef5, please check neutron logs for more information. [ 725.238799] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] [ 725.238799] env[61629]: INFO nova.compute.manager [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Terminating instance [ 725.239954] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Acquiring lock "refresh_cache-05b868fd-401e-48b7-928f-a39c002bbe71" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.240126] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Acquired lock "refresh_cache-05b868fd-401e-48b7-928f-a39c002bbe71" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.240288] env[61629]: DEBUG nova.network.neutron [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 725.677565] env[61629]: DEBUG nova.compute.manager [req-0d3f7061-27c4-4b27-985e-540e18b07bfd req-cd217d3b-cd2f-4654-ab52-6456c09657a5 service nova] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Received event network-changed-b5180d58-fd4a-49e0-8719-ec41c4dd7ef5 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 725.677732] env[61629]: DEBUG nova.compute.manager [req-0d3f7061-27c4-4b27-985e-540e18b07bfd req-cd217d3b-cd2f-4654-ab52-6456c09657a5 service nova] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Refreshing instance network info cache due to event network-changed-b5180d58-fd4a-49e0-8719-ec41c4dd7ef5. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 725.677920] env[61629]: DEBUG oslo_concurrency.lockutils [req-0d3f7061-27c4-4b27-985e-540e18b07bfd req-cd217d3b-cd2f-4654-ab52-6456c09657a5 service nova] Acquiring lock "refresh_cache-05b868fd-401e-48b7-928f-a39c002bbe71" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.697845] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7dd78062-c96e-4638-8dd1-9ad90962cecc tempest-ServerShowV247Test-397641212 tempest-ServerShowV247Test-397641212-project-member] Lock "dd406dd1-0e19-400b-a862-ae51fd134017" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.764s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.761058] env[61629]: DEBUG nova.network.neutron [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 725.880030] env[61629]: DEBUG nova.network.neutron [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.984222] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0bbf010-d134-4243-845c-f4a715a6c071 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.991740] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bc8069a-6317-4196-81c6-8ad95f2e56e4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.026941] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-524f9126-86d8-4f8c-aacd-dde8cd3d4254 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.034783] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d133c061-563b-487a-8370-9fee0944e5d3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.048976] env[61629]: DEBUG nova.compute.provider_tree [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 726.385058] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Releasing lock "refresh_cache-05b868fd-401e-48b7-928f-a39c002bbe71" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.385433] env[61629]: DEBUG nova.compute.manager [None 
req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 726.385433] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 726.385592] env[61629]: DEBUG oslo_concurrency.lockutils [req-0d3f7061-27c4-4b27-985e-540e18b07bfd req-cd217d3b-cd2f-4654-ab52-6456c09657a5 service nova] Acquired lock "refresh_cache-05b868fd-401e-48b7-928f-a39c002bbe71" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.385762] env[61629]: DEBUG nova.network.neutron [req-0d3f7061-27c4-4b27-985e-540e18b07bfd req-cd217d3b-cd2f-4654-ab52-6456c09657a5 service nova] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Refreshing network info cache for port b5180d58-fd4a-49e0-8719-ec41c4dd7ef5 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 726.386904] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-12313b91-35d9-4b87-9311-32b36721cff3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.397172] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144d9192-f6d9-44e0-aa3b-231cd54b2836 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.418372] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 05b868fd-401e-48b7-928f-a39c002bbe71 could not be found. [ 726.418587] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 726.418764] env[61629]: INFO nova.compute.manager [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Took 0.03 seconds to destroy the instance on the hypervisor. [ 726.419815] env[61629]: DEBUG oslo.service.loopingcall [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 726.419815] env[61629]: DEBUG nova.compute.manager [-] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 726.419815] env[61629]: DEBUG nova.network.neutron [-] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 726.440840] env[61629]: DEBUG nova.network.neutron [-] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.552888] env[61629]: DEBUG nova.scheduler.client.report [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 726.907222] env[61629]: DEBUG nova.network.neutron [req-0d3f7061-27c4-4b27-985e-540e18b07bfd req-cd217d3b-cd2f-4654-ab52-6456c09657a5 service nova] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.943507] env[61629]: DEBUG nova.network.neutron [-] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.995972] env[61629]: DEBUG nova.network.neutron [req-0d3f7061-27c4-4b27-985e-540e18b07bfd req-cd217d3b-cd2f-4654-ab52-6456c09657a5 service nova] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.058608] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.903s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.059271] env[61629]: ERROR nova.compute.manager [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5795bb6a-f713-42e8-baca-d885d777dc14, please check neutron logs for more information. 
[ 727.059271] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Traceback (most recent call last): [ 727.059271] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 727.059271] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] self.driver.spawn(context, instance, image_meta, [ 727.059271] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 727.059271] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 727.059271] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 727.059271] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] vm_ref = self.build_virtual_machine(instance, [ 727.059271] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 727.059271] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] vif_infos = vmwarevif.get_vif_info(self._session, [ 727.059271] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 727.059637] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] for vif in network_info: [ 727.059637] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 727.059637] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] return self._sync_wrapper(fn, *args, **kwargs) [ 727.059637] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 727.059637] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] self.wait() [ 727.059637] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 727.059637] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] self[:] = self._gt.wait() [ 727.059637] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 727.059637] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] return self._exit_event.wait() [ 727.059637] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 727.059637] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] result = hub.switch() [ 727.059637] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
727.059637] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] return self.greenlet.switch() [ 727.060185] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 727.060185] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] result = function(*args, **kwargs) [ 727.060185] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 727.060185] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] return func(*args, **kwargs) [ 727.060185] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 727.060185] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] raise e [ 727.060185] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 727.060185] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] nwinfo = self.network_api.allocate_for_instance( [ 727.060185] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 727.060185] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] created_port_ids = self._update_ports_for_instance( [ 727.060185] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 727.060185] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] with excutils.save_and_reraise_exception(): [ 727.060185] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 727.060605] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] self.force_reraise() [ 727.060605] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 727.060605] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] raise self.value [ 727.060605] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 727.060605] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] updated_port = self._update_port( [ 727.060605] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 727.060605] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] _ensure_no_port_binding_failure(port) [ 727.060605] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 727.060605] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] raise exception.PortBindingFailed(port_id=port['id']) [ 727.060605] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] nova.exception.PortBindingFailed: Binding failed for port 5795bb6a-f713-42e8-baca-d885d777dc14, please check neutron logs for more information. [ 727.060605] env[61629]: ERROR nova.compute.manager [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] [ 727.060935] env[61629]: DEBUG nova.compute.utils [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Binding failed for port 5795bb6a-f713-42e8-baca-d885d777dc14, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 727.061136] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.416s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.064062] env[61629]: DEBUG nova.compute.manager [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Build of instance 374062de-1242-44bd-b658-e8976f8c3b6c was re-scheduled: Binding failed for port 5795bb6a-f713-42e8-baca-d885d777dc14, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 727.064825] env[61629]: DEBUG nova.compute.manager [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 727.065063] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquiring lock "refresh_cache-374062de-1242-44bd-b658-e8976f8c3b6c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.065214] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquired lock "refresh_cache-374062de-1242-44bd-b658-e8976f8c3b6c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.065372] env[61629]: DEBUG nova.network.neutron [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 727.448104] env[61629]: INFO nova.compute.manager [-] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Took 1.03 seconds to deallocate network for instance. [ 727.452489] env[61629]: DEBUG nova.compute.claims [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 727.452689] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.498869] env[61629]: DEBUG oslo_concurrency.lockutils [req-0d3f7061-27c4-4b27-985e-540e18b07bfd req-cd217d3b-cd2f-4654-ab52-6456c09657a5 service nova] Releasing lock "refresh_cache-05b868fd-401e-48b7-928f-a39c002bbe71" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.498869] env[61629]: DEBUG nova.compute.manager [req-0d3f7061-27c4-4b27-985e-540e18b07bfd req-cd217d3b-cd2f-4654-ab52-6456c09657a5 service nova] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Received event network-vif-deleted-b5180d58-fd4a-49e0-8719-ec41c4dd7ef5 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 727.727668] env[61629]: DEBUG nova.network.neutron [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 727.827344] env[61629]: DEBUG nova.network.neutron [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.914259] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f471384-d5ac-442d-8946-a79a1ddb782d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.925022] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e02bfc-0227-4e6c-b491-3f8f73d4abe1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.958032] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237c23e9-f693-4c3c-9f95-1c195412a0ea {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.966408] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb24c8aa-bab7-4f65-9a67-c99423346a11 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.979554] env[61629]: DEBUG nova.compute.provider_tree [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 728.329922] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Releasing lock "refresh_cache-374062de-1242-44bd-b658-e8976f8c3b6c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.330183] env[61629]: DEBUG nova.compute.manager [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 728.330406] env[61629]: DEBUG nova.compute.manager [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 728.330739] env[61629]: DEBUG nova.network.neutron [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 728.344525] env[61629]: DEBUG nova.network.neutron [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 728.482878] env[61629]: DEBUG nova.scheduler.client.report [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 728.848073] env[61629]: DEBUG nova.network.neutron [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.987639] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.926s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.988276] env[61629]: ERROR nova.compute.manager [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 61771b60-a29f-4695-b630-b1cc0dd7ad27, please check neutron logs for more information. 
[ 728.988276] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Traceback (most recent call last): [ 728.988276] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 728.988276] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] self.driver.spawn(context, instance, image_meta, [ 728.988276] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 728.988276] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 728.988276] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 728.988276] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] vm_ref = self.build_virtual_machine(instance, [ 728.988276] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 728.988276] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] vif_infos = vmwarevif.get_vif_info(self._session, [ 728.988276] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 728.988698] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] for vif in network_info: [ 728.988698] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 728.988698] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] return self._sync_wrapper(fn, *args, **kwargs) [ 728.988698] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 728.988698] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] self.wait() [ 728.988698] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 728.988698] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] self[:] = self._gt.wait() [ 728.988698] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 728.988698] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] return self._exit_event.wait() [ 728.988698] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 728.988698] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] current.throw(*self._exc) [ 728.988698] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
728.988698] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] result = function(*args, **kwargs) [ 728.989230] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 728.989230] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] return func(*args, **kwargs) [ 728.989230] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 728.989230] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] raise e [ 728.989230] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 728.989230] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] nwinfo = self.network_api.allocate_for_instance( [ 728.989230] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 728.989230] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] created_port_ids = self._update_ports_for_instance( [ 728.989230] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 728.989230] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] with excutils.save_and_reraise_exception(): [ 728.989230] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 728.989230] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] self.force_reraise() [ 728.989230] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 728.989826] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] raise self.value [ 728.989826] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 728.989826] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] updated_port = self._update_port( [ 728.989826] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 728.989826] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] _ensure_no_port_binding_failure(port) [ 728.989826] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 728.989826] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] raise exception.PortBindingFailed(port_id=port['id']) [ 728.989826] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] nova.exception.PortBindingFailed: Binding failed for 
port 61771b60-a29f-4695-b630-b1cc0dd7ad27, please check neutron logs for more information. [ 728.989826] env[61629]: ERROR nova.compute.manager [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] [ 728.989826] env[61629]: DEBUG nova.compute.utils [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Binding failed for port 61771b60-a29f-4695-b630-b1cc0dd7ad27, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 728.990194] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.901s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 728.991677] env[61629]: INFO nova.compute.claims [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 728.995426] env[61629]: DEBUG nova.compute.manager [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Build of instance 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa was re-scheduled: Binding failed for port 61771b60-a29f-4695-b630-b1cc0dd7ad27, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 728.996083] env[61629]: DEBUG nova.compute.manager [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 728.996083] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Acquiring lock "refresh_cache-395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.996083] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Acquired lock "refresh_cache-395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.996262] env[61629]: DEBUG nova.network.neutron [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 729.350810] env[61629]: INFO nova.compute.manager [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 374062de-1242-44bd-b658-e8976f8c3b6c] Took 1.02 seconds to deallocate network for instance. [ 729.516628] env[61629]: DEBUG nova.network.neutron [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 729.585136] env[61629]: DEBUG nova.network.neutron [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.087976] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Releasing lock "refresh_cache-395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.088187] env[61629]: DEBUG nova.compute.manager [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 730.088364] env[61629]: DEBUG nova.compute.manager [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 730.088525] env[61629]: DEBUG nova.network.neutron [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 730.105116] env[61629]: DEBUG nova.network.neutron [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 730.301410] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c94a1165-2828-43bc-9b46-23b6b08b8d05 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.308933] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b597ab-6ae8-4a2a-99d5-227c340d924b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.338465] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3414a00-b8ef-4c40-bc32-a4c941fcae59 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.347034] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc513a3-ade0-44ab-b825-de327321c857 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.363862] env[61629]: DEBUG nova.compute.provider_tree [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 730.387172] env[61629]: INFO nova.scheduler.client.report [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Deleted allocations for instance 374062de-1242-44bd-b658-e8976f8c3b6c [ 730.607937] env[61629]: DEBUG nova.network.neutron [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.868486] env[61629]: DEBUG nova.scheduler.client.report [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Inventory has 
not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 730.895991] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cb8ff6cd-122c-499f-86cd-284f4c1179ca tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "374062de-1242-44bd-b658-e8976f8c3b6c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 154.693s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.113547] env[61629]: INFO nova.compute.manager [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] [instance: 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa] Took 1.02 seconds to deallocate network for instance. [ 731.373627] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.383s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.374220] env[61629]: DEBUG nova.compute.manager [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 731.376889] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.509s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.400664] env[61629]: DEBUG nova.compute.manager [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 731.886375] env[61629]: DEBUG nova.compute.utils [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 731.887661] env[61629]: DEBUG nova.compute.manager [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 731.887829] env[61629]: DEBUG nova.network.neutron [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 731.927818] env[61629]: DEBUG oslo_concurrency.lockutils [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.965851] env[61629]: DEBUG nova.policy [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9e28dac2dd40476fbaecbe1e380815ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a5810b150c2146a58b8ceb44592a03e9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 732.147441] env[61629]: INFO nova.scheduler.client.report [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Deleted allocations for instance 395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa [ 732.300170] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37e93cd-aa82-4570-a208-869da5640645 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.307724] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821a2f03-bf17-4be6-83a8-00b6082ae151 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.345649] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beba9e72-afde-4bdd-81a7-d7dfd9d5a85c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.349813] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98470bfb-ad22-4831-bf5b-fcbf1bc8037c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.355452] env[61629]: DEBUG nova.network.neutron [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Successfully created port: c24f97be-7a5e-4797-a8f6-c9cab32596b2 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 732.367217] env[61629]: DEBUG nova.compute.provider_tree [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 
tempest-ServersAdminNegativeTestJSON-485290538-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 732.390828] env[61629]: DEBUG nova.compute.manager [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 732.657636] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3c32391c-5453-4ebf-86dd-fbe274c6a779 tempest-InstanceActionsTestJSON-55712801 tempest-InstanceActionsTestJSON-55712801-project-member] Lock "395cd695-c6a0-4c7e-ad0b-8ee5d4f64cfa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 156.099s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.870102] env[61629]: DEBUG nova.scheduler.client.report [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 733.147471] env[61629]: DEBUG nova.compute.manager [req-24a57539-4bd3-4bb3-9d30-534774a03e5f req-0e8a4472-efd0-43eb-8c70-79f5be393c07 service nova] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Received event network-changed-c24f97be-7a5e-4797-a8f6-c9cab32596b2 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 733.147771] env[61629]: DEBUG nova.compute.manager [req-24a57539-4bd3-4bb3-9d30-534774a03e5f req-0e8a4472-efd0-43eb-8c70-79f5be393c07 service nova] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Refreshing instance network info cache due to event network-changed-c24f97be-7a5e-4797-a8f6-c9cab32596b2. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 733.147923] env[61629]: DEBUG oslo_concurrency.lockutils [req-24a57539-4bd3-4bb3-9d30-534774a03e5f req-0e8a4472-efd0-43eb-8c70-79f5be393c07 service nova] Acquiring lock "refresh_cache-3a804973-af62-4de1-a4ee-5943209c5884" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.148045] env[61629]: DEBUG oslo_concurrency.lockutils [req-24a57539-4bd3-4bb3-9d30-534774a03e5f req-0e8a4472-efd0-43eb-8c70-79f5be393c07 service nova] Acquired lock "refresh_cache-3a804973-af62-4de1-a4ee-5943209c5884" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.148217] env[61629]: DEBUG nova.network.neutron [req-24a57539-4bd3-4bb3-9d30-534774a03e5f req-0e8a4472-efd0-43eb-8c70-79f5be393c07 service nova] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Refreshing network info cache for port c24f97be-7a5e-4797-a8f6-c9cab32596b2 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 733.159212] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquiring lock "3cabd3ef-590a-41f3-a611-3d27b4853db5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.160084] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "3cabd3ef-590a-41f3-a611-3d27b4853db5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.160661] env[61629]: DEBUG nova.compute.manager [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 733.335793] env[61629]: ERROR nova.compute.manager [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c24f97be-7a5e-4797-a8f6-c9cab32596b2, please check neutron logs for more information. 
[ 733.335793] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 733.335793] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 733.335793] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 733.335793] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 733.335793] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 733.335793] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 733.335793] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 733.335793] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 733.335793] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 733.335793] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 733.335793] env[61629]: ERROR nova.compute.manager raise self.value [ 733.335793] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 733.335793] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 733.335793] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 733.335793] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 733.336266] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 733.336266] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 733.336266] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c24f97be-7a5e-4797-a8f6-c9cab32596b2, please check neutron logs for more information. 
[ 733.336266] env[61629]: ERROR nova.compute.manager [ 733.336266] env[61629]: Traceback (most recent call last): [ 733.336266] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 733.336266] env[61629]: listener.cb(fileno) [ 733.336266] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 733.336266] env[61629]: result = function(*args, **kwargs) [ 733.336266] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 733.336266] env[61629]: return func(*args, **kwargs) [ 733.336266] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 733.336266] env[61629]: raise e [ 733.336266] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 733.336266] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 733.336266] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 733.336266] env[61629]: created_port_ids = self._update_ports_for_instance( [ 733.336266] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 733.336266] env[61629]: with excutils.save_and_reraise_exception(): [ 733.336266] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 733.336266] env[61629]: self.force_reraise() [ 733.336266] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 733.336266] env[61629]: raise self.value [ 733.336266] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 733.336266] env[61629]: updated_port = self._update_port( [ 733.336266] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 733.336266] env[61629]: _ensure_no_port_binding_failure(port) [ 733.336266] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 733.336266] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 733.337046] env[61629]: nova.exception.PortBindingFailed: Binding failed for port c24f97be-7a5e-4797-a8f6-c9cab32596b2, please check neutron logs for more information. [ 733.337046] env[61629]: Removing descriptor: 15 [ 733.375108] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.998s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.375886] env[61629]: ERROR nova.compute.manager [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 914c32fe-3e51-4b60-92d0-6d1abe056601, please check neutron logs for more information. 
[ 733.375886] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Traceback (most recent call last): [ 733.375886] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 733.375886] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] self.driver.spawn(context, instance, image_meta, [ 733.375886] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 733.375886] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] self._vmops.spawn(context, instance, image_meta, injected_files, [ 733.375886] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 733.375886] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] vm_ref = self.build_virtual_machine(instance, [ 733.375886] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 733.375886] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] vif_infos = vmwarevif.get_vif_info(self._session, [ 733.375886] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 733.376227] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] for vif in network_info: [ 733.376227] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 733.376227] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] return self._sync_wrapper(fn, *args, **kwargs) [ 733.376227] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 733.376227] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] self.wait() [ 733.376227] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 733.376227] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] self[:] = self._gt.wait() [ 733.376227] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 733.376227] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] return self._exit_event.wait() [ 733.376227] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 733.376227] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] current.throw(*self._exc) [ 733.376227] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
733.376227] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] result = function(*args, **kwargs) [ 733.376567] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 733.376567] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] return func(*args, **kwargs) [ 733.376567] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 733.376567] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] raise e [ 733.376567] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 733.376567] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] nwinfo = self.network_api.allocate_for_instance( [ 733.376567] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 733.376567] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] created_port_ids = self._update_ports_for_instance( [ 733.376567] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 733.376567] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] with excutils.save_and_reraise_exception(): [ 733.376567] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 733.376567] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] self.force_reraise() [ 733.376567] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 733.377222] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] raise self.value [ 733.377222] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 733.377222] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] updated_port = self._update_port( [ 733.377222] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 733.377222] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] _ensure_no_port_binding_failure(port) [ 733.377222] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 733.377222] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] raise exception.PortBindingFailed(port_id=port['id']) [ 733.377222] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] nova.exception.PortBindingFailed: Binding failed for 
port 914c32fe-3e51-4b60-92d0-6d1abe056601, please check neutron logs for more information. [ 733.377222] env[61629]: ERROR nova.compute.manager [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] [ 733.377222] env[61629]: DEBUG nova.compute.utils [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Binding failed for port 914c32fe-3e51-4b60-92d0-6d1abe056601, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 733.377877] env[61629]: DEBUG oslo_concurrency.lockutils [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.876s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.380869] env[61629]: DEBUG nova.compute.manager [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Build of instance 18b4e8c7-3517-46b2-b0a1-8d17bb222874 was re-scheduled: Binding failed for port 914c32fe-3e51-4b60-92d0-6d1abe056601, please check neutron logs for more information. {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 733.381340] env[61629]: DEBUG nova.compute.manager [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 733.381562] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Acquiring lock "refresh_cache-18b4e8c7-3517-46b2-b0a1-8d17bb222874" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.381708] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Acquired lock "refresh_cache-18b4e8c7-3517-46b2-b0a1-8d17bb222874" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.381866] env[61629]: DEBUG nova.network.neutron [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 733.399172] env[61629]: DEBUG nova.compute.manager [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 733.429270] env[61629]: DEBUG nova.virt.hardware [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 733.429521] env[61629]: DEBUG nova.virt.hardware [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 733.429672] env[61629]: DEBUG nova.virt.hardware [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 733.429848] env[61629]: DEBUG nova.virt.hardware [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 733.430106] env[61629]: DEBUG nova.virt.hardware [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 733.430169] env[61629]: DEBUG nova.virt.hardware [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 733.430499] env[61629]: DEBUG nova.virt.hardware [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 733.430499] env[61629]: DEBUG nova.virt.hardware [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 733.430643] 
env[61629]: DEBUG nova.virt.hardware [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 733.430808] env[61629]: DEBUG nova.virt.hardware [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 733.430971] env[61629]: DEBUG nova.virt.hardware [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 733.432149] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cd2e4d6-e3ba-4244-b473-13e7f2e3379f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.441973] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed851fc-8afe-4a20-87f6-078a132e68c2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.457029] env[61629]: ERROR nova.compute.manager [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c24f97be-7a5e-4797-a8f6-c9cab32596b2, please check neutron logs for more information. 
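Every one of these tracebacks also passes through oslo_utils/excutils.py (__exit__ -> force_reraise), which is the save_and_reraise_exception context manager used at nova/network/neutron.py:1414. A minimal sketch of that pattern, with hypothetical stand-ins for the port update and the cleanup; only the excutils usage is the real API:

    from oslo_utils import excutils

    def update_port(port_id):
        # hypothetical stand-in for the Neutron call that failed in the log
        raise RuntimeError('binding failed for %s' % port_id)

    def update_ports_for_instance(port_ids):
        for port_id in port_ids:
            try:
                update_port(port_id)
            except Exception:
                # cleanup runs inside the block; the original exception is saved
                # and re-raised on __exit__, which is why force_reraise() appears
                # in the tracebacks above
                with excutils.save_and_reraise_exception():
                    print('cleaning up port', port_id)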
[ 733.457029] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Traceback (most recent call last): [ 733.457029] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 733.457029] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] yield resources [ 733.457029] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 733.457029] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] self.driver.spawn(context, instance, image_meta, [ 733.457029] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 733.457029] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] self._vmops.spawn(context, instance, image_meta, injected_files, [ 733.457029] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 733.457029] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] vm_ref = self.build_virtual_machine(instance, [ 733.457029] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 733.457416] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] vif_infos = vmwarevif.get_vif_info(self._session, [ 733.457416] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 733.457416] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] for vif in network_info: [ 733.457416] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 733.457416] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] return self._sync_wrapper(fn, *args, **kwargs) [ 733.457416] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 733.457416] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] self.wait() [ 733.457416] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 733.457416] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] self[:] = self._gt.wait() [ 733.457416] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 733.457416] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] return self._exit_event.wait() [ 733.457416] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 733.457416] env[61629]: ERROR 
nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] current.throw(*self._exc) [ 733.457767] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 733.457767] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] result = function(*args, **kwargs) [ 733.457767] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 733.457767] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] return func(*args, **kwargs) [ 733.457767] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 733.457767] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] raise e [ 733.457767] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 733.457767] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] nwinfo = self.network_api.allocate_for_instance( [ 733.457767] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 733.457767] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] created_port_ids = self._update_ports_for_instance( [ 733.457767] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 733.457767] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] with excutils.save_and_reraise_exception(): [ 733.457767] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 733.458140] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] self.force_reraise() [ 733.458140] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 733.458140] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] raise self.value [ 733.458140] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 733.458140] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] updated_port = self._update_port( [ 733.458140] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 733.458140] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] _ensure_no_port_binding_failure(port) [ 733.458140] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
733.458140] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] raise exception.PortBindingFailed(port_id=port['id']) [ 733.458140] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] nova.exception.PortBindingFailed: Binding failed for port c24f97be-7a5e-4797-a8f6-c9cab32596b2, please check neutron logs for more information. [ 733.458140] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] [ 733.458140] env[61629]: INFO nova.compute.manager [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Terminating instance [ 733.461190] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Acquiring lock "refresh_cache-3a804973-af62-4de1-a4ee-5943209c5884" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.669034] env[61629]: DEBUG nova.network.neutron [req-24a57539-4bd3-4bb3-9d30-534774a03e5f req-0e8a4472-efd0-43eb-8c70-79f5be393c07 service nova] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 733.683919] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.761362] env[61629]: DEBUG nova.network.neutron [req-24a57539-4bd3-4bb3-9d30-534774a03e5f req-0e8a4472-efd0-43eb-8c70-79f5be393c07 service nova] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.931854] env[61629]: DEBUG nova.network.neutron [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 734.032224] env[61629]: DEBUG nova.network.neutron [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.228041] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7901166f-4a65-466b-a692-583285c453ef {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.236025] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6781bab4-a592-4779-a75a-5ee350ac6c36 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.266623] env[61629]: DEBUG oslo_concurrency.lockutils [req-24a57539-4bd3-4bb3-9d30-534774a03e5f req-0e8a4472-efd0-43eb-8c70-79f5be393c07 service nova] Releasing lock "refresh_cache-3a804973-af62-4de1-a4ee-5943209c5884" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.267274] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Acquired lock "refresh_cache-3a804973-af62-4de1-a4ee-5943209c5884" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.267602] env[61629]: DEBUG nova.network.neutron [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 734.271811] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d48787f-4c50-4718-9d76-9b2f9284daf0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.277204] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bce0fef-e117-4615-8b38-e14371914b09 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.293126] env[61629]: DEBUG nova.compute.provider_tree [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.535045] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Releasing lock "refresh_cache-18b4e8c7-3517-46b2-b0a1-8d17bb222874" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.535336] env[61629]: DEBUG nova.compute.manager [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 
tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 734.535500] env[61629]: DEBUG nova.compute.manager [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 734.535666] env[61629]: DEBUG nova.network.neutron [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 734.554793] env[61629]: DEBUG nova.network.neutron [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 734.794543] env[61629]: DEBUG nova.network.neutron [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 734.796984] env[61629]: DEBUG nova.scheduler.client.report [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 734.908817] env[61629]: DEBUG nova.network.neutron [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.056963] env[61629]: DEBUG nova.network.neutron [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.171321] env[61629]: DEBUG nova.compute.manager [req-01a88ab3-0339-4cf7-a44e-b2dac82e9584 req-7353867a-2dd5-4d0d-b4c7-b177e3a4e047 service nova] [instance: 
3a804973-af62-4de1-a4ee-5943209c5884] Received event network-vif-deleted-c24f97be-7a5e-4797-a8f6-c9cab32596b2 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 735.302036] env[61629]: DEBUG oslo_concurrency.lockutils [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.924s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.302367] env[61629]: ERROR nova.compute.manager [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1edcb2db-1404-4ebb-a079-e7a025dc6acf, please check neutron logs for more information. [ 735.302367] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Traceback (most recent call last): [ 735.302367] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 735.302367] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] self.driver.spawn(context, instance, image_meta, [ 735.302367] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 735.302367] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] self._vmops.spawn(context, instance, image_meta, injected_files, [ 735.302367] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 735.302367] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] vm_ref = self.build_virtual_machine(instance, [ 735.302367] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 735.302367] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] vif_infos = vmwarevif.get_vif_info(self._session, [ 735.302367] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 735.302690] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] for vif in network_info: [ 735.302690] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 735.302690] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] return self._sync_wrapper(fn, *args, **kwargs) [ 735.302690] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 735.302690] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] self.wait() [ 735.302690] env[61629]: ERROR nova.compute.manager [instance: 
54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 735.302690] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] self[:] = self._gt.wait() [ 735.302690] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 735.302690] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] return self._exit_event.wait() [ 735.302690] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 735.302690] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] current.throw(*self._exc) [ 735.302690] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 735.302690] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] result = function(*args, **kwargs) [ 735.303083] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 735.303083] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] return func(*args, **kwargs) [ 735.303083] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 735.303083] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] raise e [ 735.303083] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 735.303083] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] nwinfo = self.network_api.allocate_for_instance( [ 735.303083] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 735.303083] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] created_port_ids = self._update_ports_for_instance( [ 735.303083] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 735.303083] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] with excutils.save_and_reraise_exception(): [ 735.303083] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 735.303083] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] self.force_reraise() [ 735.303083] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 735.303514] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] raise self.value [ 735.303514] env[61629]: ERROR nova.compute.manager 
[instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 735.303514] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] updated_port = self._update_port( [ 735.303514] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 735.303514] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] _ensure_no_port_binding_failure(port) [ 735.303514] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 735.303514] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] raise exception.PortBindingFailed(port_id=port['id']) [ 735.303514] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] nova.exception.PortBindingFailed: Binding failed for port 1edcb2db-1404-4ebb-a079-e7a025dc6acf, please check neutron logs for more information. [ 735.303514] env[61629]: ERROR nova.compute.manager [instance: 54e03464-0f37-4f4d-8746-821e73da0541] [ 735.303514] env[61629]: DEBUG nova.compute.utils [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Binding failed for port 1edcb2db-1404-4ebb-a079-e7a025dc6acf, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 735.304529] env[61629]: DEBUG oslo_concurrency.lockutils [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.325s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.306964] env[61629]: INFO nova.compute.claims [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 735.310071] env[61629]: DEBUG nova.compute.manager [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Build of instance 54e03464-0f37-4f4d-8746-821e73da0541 was re-scheduled: Binding failed for port 1edcb2db-1404-4ebb-a079-e7a025dc6acf, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 735.310447] env[61629]: DEBUG nova.compute.manager [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 735.310624] env[61629]: DEBUG oslo_concurrency.lockutils [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "refresh_cache-54e03464-0f37-4f4d-8746-821e73da0541" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 735.310770] env[61629]: DEBUG oslo_concurrency.lockutils [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquired lock "refresh_cache-54e03464-0f37-4f4d-8746-821e73da0541" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.310928] env[61629]: DEBUG nova.network.neutron [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 735.411805] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Releasing lock "refresh_cache-3a804973-af62-4de1-a4ee-5943209c5884" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.412273] env[61629]: DEBUG nova.compute.manager [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 735.412467] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 735.412755] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b7411cb7-2bce-4918-be9a-7bc98f7837c8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.422316] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c63fa5-5610-4e1b-8b9c-e824eb5fb3b5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.446929] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3a804973-af62-4de1-a4ee-5943209c5884 could not be found. [ 735.447340] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 735.447340] env[61629]: INFO nova.compute.manager [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Took 0.03 seconds to destroy the instance on the hypervisor. [ 735.447592] env[61629]: DEBUG oslo.service.loopingcall [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 735.447944] env[61629]: DEBUG nova.compute.manager [-] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 735.447944] env[61629]: DEBUG nova.network.neutron [-] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 735.462220] env[61629]: DEBUG nova.network.neutron [-] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 735.559989] env[61629]: INFO nova.compute.manager [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] [instance: 18b4e8c7-3517-46b2-b0a1-8d17bb222874] Took 1.02 seconds to deallocate network for instance. 
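The Acquiring / "acquired ... waited N.NNNs" / "released ... held N.NNNs" lines around the "compute_resources" lock come from the oslo.concurrency lockutils wrapper (the 'inner' function in lockutils.py) guarding the resource tracker's claim and abort paths. A minimal sketch of the pattern that produces those lines, with a hypothetical function standing in for ResourceTracker.instance_claim:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid):
        # runs only while holding the 'compute_resources' lock; the decorator's
        # wrapper logs the wait time on acquire and the hold time on release,
        # matching the debug lines above
        print('claiming resources for', instance_uuid)

    instance_claim('012e6d9c-0f02-4761-9639-9a8e8972ea2b')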
[ 735.833399] env[61629]: DEBUG nova.network.neutron [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 735.947904] env[61629]: DEBUG nova.network.neutron [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.965026] env[61629]: DEBUG nova.network.neutron [-] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.451051] env[61629]: DEBUG oslo_concurrency.lockutils [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Releasing lock "refresh_cache-54e03464-0f37-4f4d-8746-821e73da0541" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.451409] env[61629]: DEBUG nova.compute.manager [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 736.451637] env[61629]: DEBUG nova.compute.manager [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 736.452104] env[61629]: DEBUG nova.network.neutron [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 736.467374] env[61629]: INFO nova.compute.manager [-] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Took 1.02 seconds to deallocate network for instance. [ 736.469599] env[61629]: DEBUG nova.network.neutron [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 736.470934] env[61629]: DEBUG nova.compute.claims [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 736.471155] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.593150] env[61629]: INFO nova.scheduler.client.report [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Deleted allocations for instance 18b4e8c7-3517-46b2-b0a1-8d17bb222874 [ 736.612156] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eac9626b-bafd-4d23-bbe4-40a2813cb16e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.621393] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ea4ef4-2000-4572-90a5-0dde9b09a8e0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.653924] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d983f842-b0e4-4d37-9511-fbcc45293be4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.662207] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3743d4c7-a359-49e3-ba99-ce9f68c87daf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.675466] env[61629]: DEBUG nova.compute.provider_tree [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.972096] env[61629]: DEBUG nova.network.neutron [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.102658] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7d5dab3c-8896-4937-b1a4-3de47821b345 tempest-ServersAdminNegativeTestJSON-485290538 tempest-ServersAdminNegativeTestJSON-485290538-project-member] Lock "18b4e8c7-3517-46b2-b0a1-8d17bb222874" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 159.023s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.179343] env[61629]: DEBUG 
nova.scheduler.client.report [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 737.475083] env[61629]: INFO nova.compute.manager [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 54e03464-0f37-4f4d-8746-821e73da0541] Took 1.02 seconds to deallocate network for instance. [ 737.605850] env[61629]: DEBUG nova.compute.manager [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 737.684547] env[61629]: DEBUG oslo_concurrency.lockutils [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.379s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.684547] env[61629]: DEBUG nova.compute.manager [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 737.686908] env[61629]: DEBUG oslo_concurrency.lockutils [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.785s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.126978] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.191567] env[61629]: DEBUG nova.compute.utils [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 738.196281] env[61629]: DEBUG nova.compute.manager [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 738.196452] env[61629]: DEBUG nova.network.neutron [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 738.263833] env[61629]: DEBUG nova.policy [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fcf96129b2241c7a58d27d7e1ee3316', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2c8b2fda65404811a22673b564d9a156', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 738.503663] env[61629]: INFO nova.scheduler.client.report [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Deleted allocations for instance 54e03464-0f37-4f4d-8746-821e73da0541 [ 738.549826] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-094e5dc9-6b8a-441d-8818-406c539992b3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.559368] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c89269-239a-4366-ba04-8c49b93e72f1 {{(pid=61629) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.597494] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3b8e18-bee2-4298-925d-3a53d4a0e24f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.604911] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5fda88-e4ab-4f9e-a800-fb3d959ef311 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.621843] env[61629]: DEBUG nova.compute.provider_tree [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 738.697329] env[61629]: DEBUG nova.compute.manager [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 738.787505] env[61629]: DEBUG nova.network.neutron [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Successfully created port: 804a1b0f-457e-4300-b463-8dc58d2b784f {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 739.014273] env[61629]: DEBUG oslo_concurrency.lockutils [None req-855525da-7aa0-4487-9ab9-9a886ee2e2c5 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "54e03464-0f37-4f4d-8746-821e73da0541" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 159.832s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.123910] env[61629]: DEBUG nova.scheduler.client.report [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 739.205665] env[61629]: INFO nova.virt.block_device [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Booting with volume e9669362-c0b6-4963-8dc6-bb41f389b0ec at /dev/sda [ 739.257801] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-366af304-9ae4-4137-85a0-510eb0f037eb {{(pid=61629) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.272016] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb522051-1cdf-4c80-973d-c386bc23ee8b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.296259] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-57d80477-c4a6-4fcc-a1f0-99a4fc7a3aa3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.302370] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-133baf02-c6b1-4b28-8cb3-29f0b1b69957 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.327891] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492cc484-6048-4707-8b31-513a024dd176 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.334594] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3067e71-90b2-4a4c-be84-1d49629dfb62 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.350024] env[61629]: DEBUG nova.virt.block_device [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Updating existing volume attachment record: 2adfa8ca-4f74-4edd-9269-158ddc242587 {{(pid=61629) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 739.516957] env[61629]: DEBUG nova.compute.manager [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 739.632689] env[61629]: DEBUG oslo_concurrency.lockutils [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.946s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.633401] env[61629]: ERROR nova.compute.manager [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9882889a-9537-4827-a883-dcc6746b2eaf, please check neutron logs for more information. 
[ 739.633401] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Traceback (most recent call last): [ 739.633401] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 739.633401] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] self.driver.spawn(context, instance, image_meta, [ 739.633401] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 739.633401] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] self._vmops.spawn(context, instance, image_meta, injected_files, [ 739.633401] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 739.633401] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] vm_ref = self.build_virtual_machine(instance, [ 739.633401] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 739.633401] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] vif_infos = vmwarevif.get_vif_info(self._session, [ 739.633401] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 739.633735] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] for vif in network_info: [ 739.633735] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 739.633735] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] return self._sync_wrapper(fn, *args, **kwargs) [ 739.633735] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 739.633735] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] self.wait() [ 739.633735] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 739.633735] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] self[:] = self._gt.wait() [ 739.633735] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 739.633735] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] return self._exit_event.wait() [ 739.633735] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 739.633735] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] current.throw(*self._exc) [ 739.633735] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
739.633735] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] result = function(*args, **kwargs) [ 739.634095] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 739.634095] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] return func(*args, **kwargs) [ 739.634095] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 739.634095] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] raise e [ 739.634095] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 739.634095] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] nwinfo = self.network_api.allocate_for_instance( [ 739.634095] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 739.634095] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] created_port_ids = self._update_ports_for_instance( [ 739.634095] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 739.634095] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] with excutils.save_and_reraise_exception(): [ 739.634095] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 739.634095] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] self.force_reraise() [ 739.634095] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 739.634642] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] raise self.value [ 739.634642] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 739.634642] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] updated_port = self._update_port( [ 739.634642] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 739.634642] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] _ensure_no_port_binding_failure(port) [ 739.634642] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 739.634642] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] raise exception.PortBindingFailed(port_id=port['id']) [ 739.634642] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] nova.exception.PortBindingFailed: Binding failed for 
port 9882889a-9537-4827-a883-dcc6746b2eaf, please check neutron logs for more information. [ 739.634642] env[61629]: ERROR nova.compute.manager [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] [ 739.634642] env[61629]: DEBUG nova.compute.utils [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Binding failed for port 9882889a-9537-4827-a883-dcc6746b2eaf, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 739.635936] env[61629]: DEBUG oslo_concurrency.lockutils [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.904s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.640351] env[61629]: DEBUG nova.compute.manager [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Build of instance 71a5a130-fd26-4cf5-9b27-520f9eb62c55 was re-scheduled: Binding failed for port 9882889a-9537-4827-a883-dcc6746b2eaf, please check neutron logs for more information. {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 739.641112] env[61629]: DEBUG nova.compute.manager [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 739.641335] env[61629]: DEBUG oslo_concurrency.lockutils [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquiring lock "refresh_cache-71a5a130-fd26-4cf5-9b27-520f9eb62c55" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.641891] env[61629]: DEBUG oslo_concurrency.lockutils [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquired lock "refresh_cache-71a5a130-fd26-4cf5-9b27-520f9eb62c55" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.641891] env[61629]: DEBUG nova.network.neutron [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 739.826682] env[61629]: DEBUG nova.compute.manager [req-cd3de39b-a3c8-4568-bcbc-4dbfc5db0e33 req-9e6a27fc-6de0-42b0-9d19-fb85d73c3470 service nova] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Received event network-changed-804a1b0f-457e-4300-b463-8dc58d2b784f {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 739.826865] env[61629]: DEBUG nova.compute.manager 
[req-cd3de39b-a3c8-4568-bcbc-4dbfc5db0e33 req-9e6a27fc-6de0-42b0-9d19-fb85d73c3470 service nova] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Refreshing instance network info cache due to event network-changed-804a1b0f-457e-4300-b463-8dc58d2b784f. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 739.827111] env[61629]: DEBUG oslo_concurrency.lockutils [req-cd3de39b-a3c8-4568-bcbc-4dbfc5db0e33 req-9e6a27fc-6de0-42b0-9d19-fb85d73c3470 service nova] Acquiring lock "refresh_cache-012e6d9c-0f02-4761-9639-9a8e8972ea2b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.827262] env[61629]: DEBUG oslo_concurrency.lockutils [req-cd3de39b-a3c8-4568-bcbc-4dbfc5db0e33 req-9e6a27fc-6de0-42b0-9d19-fb85d73c3470 service nova] Acquired lock "refresh_cache-012e6d9c-0f02-4761-9639-9a8e8972ea2b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.827418] env[61629]: DEBUG nova.network.neutron [req-cd3de39b-a3c8-4568-bcbc-4dbfc5db0e33 req-9e6a27fc-6de0-42b0-9d19-fb85d73c3470 service nova] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Refreshing network info cache for port 804a1b0f-457e-4300-b463-8dc58d2b784f {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 740.039706] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.066710] env[61629]: ERROR nova.compute.manager [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 804a1b0f-457e-4300-b463-8dc58d2b784f, please check neutron logs for more information. 
[ 740.066710] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 740.066710] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 740.066710] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 740.066710] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 740.066710] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 740.066710] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 740.066710] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 740.066710] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 740.066710] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 740.066710] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 740.066710] env[61629]: ERROR nova.compute.manager raise self.value [ 740.066710] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 740.066710] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 740.066710] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 740.066710] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 740.067212] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 740.067212] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 740.067212] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 804a1b0f-457e-4300-b463-8dc58d2b784f, please check neutron logs for more information. 
[ 740.067212] env[61629]: ERROR nova.compute.manager [ 740.067212] env[61629]: Traceback (most recent call last): [ 740.067212] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 740.067212] env[61629]: listener.cb(fileno) [ 740.067212] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 740.067212] env[61629]: result = function(*args, **kwargs) [ 740.067212] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 740.067212] env[61629]: return func(*args, **kwargs) [ 740.067212] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 740.067212] env[61629]: raise e [ 740.067212] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 740.067212] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 740.067212] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 740.067212] env[61629]: created_port_ids = self._update_ports_for_instance( [ 740.067212] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 740.067212] env[61629]: with excutils.save_and_reraise_exception(): [ 740.067212] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 740.067212] env[61629]: self.force_reraise() [ 740.067212] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 740.067212] env[61629]: raise self.value [ 740.067212] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 740.067212] env[61629]: updated_port = self._update_port( [ 740.067212] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 740.067212] env[61629]: _ensure_no_port_binding_failure(port) [ 740.067212] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 740.067212] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 740.068685] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 804a1b0f-457e-4300-b463-8dc58d2b784f, please check neutron logs for more information. [ 740.068685] env[61629]: Removing descriptor: 15 [ 740.165562] env[61629]: DEBUG nova.network.neutron [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 740.244881] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "c1bb3820-0c77-4a7e-bcce-17d5e6793ab9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.244881] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "c1bb3820-0c77-4a7e-bcce-17d5e6793ab9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.267177] env[61629]: DEBUG nova.network.neutron [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.349276] env[61629]: DEBUG nova.network.neutron [req-cd3de39b-a3c8-4568-bcbc-4dbfc5db0e33 req-9e6a27fc-6de0-42b0-9d19-fb85d73c3470 service nova] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 740.437742] env[61629]: DEBUG nova.network.neutron [req-cd3de39b-a3c8-4568-bcbc-4dbfc5db0e33 req-9e6a27fc-6de0-42b0-9d19-fb85d73c3470 service nova] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.506731] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f31a697-c3af-4a80-876c-6b7ed6b00502 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.515306] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91902748-b574-4e94-9a7a-3b5c46ba6d40 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.546207] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c263589-23d4-4e0f-9491-c806253f9233 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.553288] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a349932e-0276-471b-a625-887591154443 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.566375] env[61629]: DEBUG nova.compute.provider_tree [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 740.770212] env[61629]: DEBUG oslo_concurrency.lockutils [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Releasing lock "refresh_cache-71a5a130-fd26-4cf5-9b27-520f9eb62c55" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.770412] env[61629]: DEBUG nova.compute.manager [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 740.770594] env[61629]: DEBUG nova.compute.manager [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 740.770764] env[61629]: DEBUG nova.network.neutron [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 740.786739] env[61629]: DEBUG nova.network.neutron [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 740.940523] env[61629]: DEBUG oslo_concurrency.lockutils [req-cd3de39b-a3c8-4568-bcbc-4dbfc5db0e33 req-9e6a27fc-6de0-42b0-9d19-fb85d73c3470 service nova] Releasing lock "refresh_cache-012e6d9c-0f02-4761-9639-9a8e8972ea2b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.070464] env[61629]: DEBUG nova.scheduler.client.report [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 741.290065] env[61629]: DEBUG nova.network.neutron [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.474467] env[61629]: DEBUG nova.compute.manager [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 741.474467] env[61629]: DEBUG nova.virt.hardware [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 741.474467] env[61629]: DEBUG nova.virt.hardware [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 741.474467] env[61629]: DEBUG nova.virt.hardware [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 741.474662] env[61629]: DEBUG nova.virt.hardware [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 741.474662] env[61629]: DEBUG nova.virt.hardware [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 741.474662] env[61629]: DEBUG nova.virt.hardware [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 741.474662] env[61629]: DEBUG nova.virt.hardware [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 741.474662] env[61629]: DEBUG nova.virt.hardware [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 741.474838] env[61629]: DEBUG nova.virt.hardware [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Got 1 possible 
topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 741.475018] env[61629]: DEBUG nova.virt.hardware [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 741.475389] env[61629]: DEBUG nova.virt.hardware [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 741.476821] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7286f450-6273-4420-aa50-b1dcef6d64d7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.485509] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af94e9d-6fc7-4004-9348-85e8c54f46a9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.500413] env[61629]: ERROR nova.compute.manager [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 804a1b0f-457e-4300-b463-8dc58d2b784f, please check neutron logs for more information. 
[ 741.500413] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Traceback (most recent call last): [ 741.500413] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 741.500413] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] yield resources [ 741.500413] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 741.500413] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] self.driver.spawn(context, instance, image_meta, [ 741.500413] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 741.500413] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 741.500413] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 741.500413] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] vm_ref = self.build_virtual_machine(instance, [ 741.500413] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 741.500853] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] vif_infos = vmwarevif.get_vif_info(self._session, [ 741.500853] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 741.500853] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] for vif in network_info: [ 741.500853] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 741.500853] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] return self._sync_wrapper(fn, *args, **kwargs) [ 741.500853] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 741.500853] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] self.wait() [ 741.500853] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 741.500853] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] self[:] = self._gt.wait() [ 741.500853] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 741.500853] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] return self._exit_event.wait() [ 741.500853] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 741.500853] env[61629]: ERROR 
nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] current.throw(*self._exc) [ 741.501292] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 741.501292] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] result = function(*args, **kwargs) [ 741.501292] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 741.501292] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] return func(*args, **kwargs) [ 741.501292] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 741.501292] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] raise e [ 741.501292] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 741.501292] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] nwinfo = self.network_api.allocate_for_instance( [ 741.501292] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 741.501292] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] created_port_ids = self._update_ports_for_instance( [ 741.501292] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 741.501292] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] with excutils.save_and_reraise_exception(): [ 741.501292] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 741.501698] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] self.force_reraise() [ 741.501698] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 741.501698] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] raise self.value [ 741.501698] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 741.501698] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] updated_port = self._update_port( [ 741.501698] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 741.501698] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] _ensure_no_port_binding_failure(port) [ 741.501698] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
741.501698] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] raise exception.PortBindingFailed(port_id=port['id']) [ 741.501698] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] nova.exception.PortBindingFailed: Binding failed for port 804a1b0f-457e-4300-b463-8dc58d2b784f, please check neutron logs for more information. [ 741.501698] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] [ 741.503888] env[61629]: INFO nova.compute.manager [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Terminating instance [ 741.505748] env[61629]: DEBUG oslo_concurrency.lockutils [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Acquiring lock "refresh_cache-012e6d9c-0f02-4761-9639-9a8e8972ea2b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.505748] env[61629]: DEBUG oslo_concurrency.lockutils [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Acquired lock "refresh_cache-012e6d9c-0f02-4761-9639-9a8e8972ea2b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.505748] env[61629]: DEBUG nova.network.neutron [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 741.576210] env[61629]: DEBUG oslo_concurrency.lockutils [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.940s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.576853] env[61629]: ERROR nova.compute.manager [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fe934045-b15c-49e0-b824-cd21c688263e, please check neutron logs for more information. 
[ 741.576853] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Traceback (most recent call last): [ 741.576853] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 741.576853] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] self.driver.spawn(context, instance, image_meta, [ 741.576853] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 741.576853] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 741.576853] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 741.576853] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] vm_ref = self.build_virtual_machine(instance, [ 741.576853] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 741.576853] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] vif_infos = vmwarevif.get_vif_info(self._session, [ 741.576853] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 741.577190] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] for vif in network_info: [ 741.577190] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 741.577190] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] return self._sync_wrapper(fn, *args, **kwargs) [ 741.577190] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 741.577190] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] self.wait() [ 741.577190] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 741.577190] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] self[:] = self._gt.wait() [ 741.577190] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 741.577190] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] return self._exit_event.wait() [ 741.577190] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 741.577190] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] result = hub.switch() [ 741.577190] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
741.577190] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] return self.greenlet.switch() [ 741.577541] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 741.577541] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] result = function(*args, **kwargs) [ 741.577541] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 741.577541] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] return func(*args, **kwargs) [ 741.577541] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 741.577541] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] raise e [ 741.577541] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 741.577541] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] nwinfo = self.network_api.allocate_for_instance( [ 741.577541] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 741.577541] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] created_port_ids = self._update_ports_for_instance( [ 741.577541] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 741.577541] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] with excutils.save_and_reraise_exception(): [ 741.577541] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 741.577888] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] self.force_reraise() [ 741.577888] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 741.577888] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] raise self.value [ 741.577888] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 741.577888] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] updated_port = self._update_port( [ 741.577888] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 741.577888] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] _ensure_no_port_binding_failure(port) [ 741.577888] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 741.577888] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] raise exception.PortBindingFailed(port_id=port['id']) [ 741.577888] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] nova.exception.PortBindingFailed: Binding failed for port fe934045-b15c-49e0-b824-cd21c688263e, please check neutron logs for more information. [ 741.577888] env[61629]: ERROR nova.compute.manager [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] [ 741.578200] env[61629]: DEBUG nova.compute.utils [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Binding failed for port fe934045-b15c-49e0-b824-cd21c688263e, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 741.578751] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.652s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.582065] env[61629]: DEBUG nova.compute.manager [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Build of instance 6dd1097f-7353-4938-be2b-51c248e45fe2 was re-scheduled: Binding failed for port fe934045-b15c-49e0-b824-cd21c688263e, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 741.582558] env[61629]: DEBUG nova.compute.manager [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 741.582834] env[61629]: DEBUG oslo_concurrency.lockutils [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Acquiring lock "refresh_cache-6dd1097f-7353-4938-be2b-51c248e45fe2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.583071] env[61629]: DEBUG oslo_concurrency.lockutils [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Acquired lock "refresh_cache-6dd1097f-7353-4938-be2b-51c248e45fe2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.583364] env[61629]: DEBUG nova.network.neutron [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 741.794649] env[61629]: INFO nova.compute.manager [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 71a5a130-fd26-4cf5-9b27-520f9eb62c55] Took 1.02 seconds to deallocate network for instance. [ 741.855383] env[61629]: DEBUG nova.compute.manager [req-8970c75f-2a54-4cfd-928b-e83b95c599f5 req-bcacc967-8b36-4ab3-8eb3-9f2a480ad279 service nova] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Received event network-vif-deleted-804a1b0f-457e-4300-b463-8dc58d2b784f {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 742.032100] env[61629]: DEBUG nova.network.neutron [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.107707] env[61629]: DEBUG nova.network.neutron [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.171668] env[61629]: DEBUG nova.network.neutron [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.184653] env[61629]: DEBUG nova.network.neutron [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.451167] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c88d7f95-4cff-4d96-a2a5-95a7a449832d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.463432] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3c178c-50e5-4a7b-ab07-6b7e060e1310 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.494528] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b924e5-3ae3-4afa-bbb7-bafb8897ac65 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.501836] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-670eae91-84e4-4667-8fb0-593e910be95d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.516116] env[61629]: DEBUG nova.compute.provider_tree [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 742.678545] env[61629]: DEBUG oslo_concurrency.lockutils [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Releasing lock "refresh_cache-012e6d9c-0f02-4761-9639-9a8e8972ea2b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.679133] env[61629]: DEBUG nova.compute.manager [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 742.679462] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-679f32ae-2c25-4853-ae1a-3c18f92eccbe {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.689515] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c68faa7-94ef-4ea8-af9f-e9a1aeb2651a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.700726] env[61629]: DEBUG oslo_concurrency.lockutils [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Releasing lock "refresh_cache-6dd1097f-7353-4938-be2b-51c248e45fe2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.702812] env[61629]: DEBUG nova.compute.manager [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 742.702812] env[61629]: DEBUG nova.compute.manager [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 742.702812] env[61629]: DEBUG nova.network.neutron [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 742.716600] env[61629]: WARNING nova.virt.vmwareapi.driver [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 012e6d9c-0f02-4761-9639-9a8e8972ea2b could not be found. [ 742.716600] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 742.716889] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e9d359a1-1333-40da-b54b-3a09ca01ea11 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.724357] env[61629]: DEBUG nova.network.neutron [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.730097] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47c409bf-b193-41ab-a5b1-00b6683f44bd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.742180] env[61629]: DEBUG nova.network.neutron [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.757053] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 012e6d9c-0f02-4761-9639-9a8e8972ea2b could not be found. [ 742.757314] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 742.757518] env[61629]: INFO nova.compute.manager [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Took 0.08 seconds to destroy the instance on the hypervisor. [ 742.757787] env[61629]: DEBUG oslo.service.loopingcall [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 742.758265] env[61629]: DEBUG nova.compute.manager [-] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 742.758313] env[61629]: DEBUG nova.network.neutron [-] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 742.776173] env[61629]: DEBUG nova.network.neutron [-] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.850969] env[61629]: INFO nova.scheduler.client.report [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Deleted allocations for instance 71a5a130-fd26-4cf5-9b27-520f9eb62c55 [ 743.019456] env[61629]: DEBUG nova.scheduler.client.report [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 743.244760] env[61629]: INFO nova.compute.manager [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] [instance: 6dd1097f-7353-4938-be2b-51c248e45fe2] Took 0.54 seconds to deallocate network for instance. [ 743.279487] env[61629]: DEBUG nova.network.neutron [-] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.311871] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquiring lock "dce0c7e1-1e47-49ad-88f7-f8f5e293d239" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.315673] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Lock "dce0c7e1-1e47-49ad-88f7-f8f5e293d239" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.360716] env[61629]: DEBUG oslo_concurrency.lockutils [None req-33930a67-cee1-44fb-b004-671adf7819ec tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "71a5a130-fd26-4cf5-9b27-520f9eb62c55" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 163.692s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.526467] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.948s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.527122] 
env[61629]: ERROR nova.compute.manager [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 61e08638-0935-43a7-9938-7d14435a76dd, please check neutron logs for more information. [ 743.527122] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Traceback (most recent call last): [ 743.527122] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 743.527122] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] self.driver.spawn(context, instance, image_meta, [ 743.527122] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 743.527122] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] self._vmops.spawn(context, instance, image_meta, injected_files, [ 743.527122] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 743.527122] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] vm_ref = self.build_virtual_machine(instance, [ 743.527122] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 743.527122] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] vif_infos = vmwarevif.get_vif_info(self._session, [ 743.527122] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 743.527488] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] for vif in network_info: [ 743.527488] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 743.527488] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] return self._sync_wrapper(fn, *args, **kwargs) [ 743.527488] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 743.527488] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] self.wait() [ 743.527488] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 743.527488] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] self[:] = self._gt.wait() [ 743.527488] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 743.527488] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] return self._exit_event.wait() [ 743.527488] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 743.527488] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] current.throw(*self._exc) [ 743.527488] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 743.527488] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] result = function(*args, **kwargs) [ 743.527909] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 743.527909] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] return func(*args, **kwargs) [ 743.527909] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 743.527909] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] raise e [ 743.527909] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 743.527909] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] nwinfo = self.network_api.allocate_for_instance( [ 743.527909] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 743.527909] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] created_port_ids = self._update_ports_for_instance( [ 743.527909] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 743.527909] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] with excutils.save_and_reraise_exception(): [ 743.527909] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 743.527909] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] self.force_reraise() [ 743.527909] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 743.528340] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] raise self.value [ 743.528340] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 743.528340] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] updated_port = self._update_port( [ 743.528340] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 743.528340] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] _ensure_no_port_binding_failure(port) [ 743.528340] env[61629]: ERROR nova.compute.manager [instance: 
39f7c5ee-7d07-4516-b008-40d5778cf139] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 743.528340] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] raise exception.PortBindingFailed(port_id=port['id']) [ 743.528340] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] nova.exception.PortBindingFailed: Binding failed for port 61e08638-0935-43a7-9938-7d14435a76dd, please check neutron logs for more information. [ 743.528340] env[61629]: ERROR nova.compute.manager [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] [ 743.528340] env[61629]: DEBUG nova.compute.utils [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Binding failed for port 61e08638-0935-43a7-9938-7d14435a76dd, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 743.529538] env[61629]: DEBUG nova.compute.manager [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Build of instance 39f7c5ee-7d07-4516-b008-40d5778cf139 was re-scheduled: Binding failed for port 61e08638-0935-43a7-9938-7d14435a76dd, please check neutron logs for more information. {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 743.529938] env[61629]: DEBUG nova.compute.manager [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 743.530193] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Acquiring lock "refresh_cache-39f7c5ee-7d07-4516-b008-40d5778cf139" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.530356] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Acquired lock "refresh_cache-39f7c5ee-7d07-4516-b008-40d5778cf139" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.530515] env[61629]: DEBUG nova.network.neutron [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 743.531848] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.079s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
743.782631] env[61629]: INFO nova.compute.manager [-] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Took 1.02 seconds to deallocate network for instance. [ 743.862600] env[61629]: DEBUG nova.compute.manager [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 744.066568] env[61629]: DEBUG nova.network.neutron [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 744.226271] env[61629]: DEBUG nova.network.neutron [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.273973] env[61629]: INFO nova.scheduler.client.report [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Deleted allocations for instance 6dd1097f-7353-4938-be2b-51c248e45fe2 [ 744.355759] env[61629]: INFO nova.compute.manager [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Took 0.57 seconds to detach 1 volumes for instance. 
[ 744.358678] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190e4d45-5cd5-41e1-8180-5119ad1502d5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.361313] env[61629]: DEBUG nova.compute.claims [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 744.361481] env[61629]: DEBUG oslo_concurrency.lockutils [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.366721] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef2bd3e9-99ac-4d11-b9f7-667131ce9283 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.400539] env[61629]: DEBUG oslo_concurrency.lockutils [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.401339] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e1744e-84b9-4143-a543-233015398fcd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.409309] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-912a6d2e-fb2c-47b8-9882-c51467772117 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.422703] env[61629]: DEBUG nova.compute.provider_tree [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 744.731032] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Releasing lock "refresh_cache-39f7c5ee-7d07-4516-b008-40d5778cf139" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.731032] env[61629]: DEBUG nova.compute.manager [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 744.731032] env[61629]: DEBUG nova.compute.manager [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 744.731280] env[61629]: DEBUG nova.network.neutron [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 744.758356] env[61629]: DEBUG nova.network.neutron [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 744.783289] env[61629]: DEBUG oslo_concurrency.lockutils [None req-346dafe1-9fa5-4ba7-a0f3-fdf4ab2cb7eb tempest-MigrationsAdminTest-1339947110 tempest-MigrationsAdminTest-1339947110-project-member] Lock "6dd1097f-7353-4938-be2b-51c248e45fe2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 163.973s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.926241] env[61629]: DEBUG nova.scheduler.client.report [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 745.264139] env[61629]: DEBUG nova.network.neutron [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.285725] env[61629]: DEBUG nova.compute.manager [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Starting instance... 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 745.435023] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.900s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.435023] env[61629]: ERROR nova.compute.manager [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b5180d58-fd4a-49e0-8719-ec41c4dd7ef5, please check neutron logs for more information. [ 745.435023] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Traceback (most recent call last): [ 745.435023] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 745.435023] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] self.driver.spawn(context, instance, image_meta, [ 745.435023] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 745.435023] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] self._vmops.spawn(context, instance, image_meta, injected_files, [ 745.435023] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 745.435023] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] vm_ref = self.build_virtual_machine(instance, [ 745.435793] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 745.435793] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] vif_infos = vmwarevif.get_vif_info(self._session, [ 745.435793] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 745.435793] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] for vif in network_info: [ 745.435793] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 745.435793] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] return self._sync_wrapper(fn, *args, **kwargs) [ 745.435793] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 745.435793] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] self.wait() [ 745.435793] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 745.435793] env[61629]: ERROR 
nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] self[:] = self._gt.wait() [ 745.435793] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 745.435793] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] return self._exit_event.wait() [ 745.435793] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 745.436174] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] current.throw(*self._exc) [ 745.436174] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 745.436174] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] result = function(*args, **kwargs) [ 745.436174] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 745.436174] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] return func(*args, **kwargs) [ 745.436174] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 745.436174] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] raise e [ 745.436174] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 745.436174] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] nwinfo = self.network_api.allocate_for_instance( [ 745.436174] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 745.436174] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] created_port_ids = self._update_ports_for_instance( [ 745.436174] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 745.436174] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] with excutils.save_and_reraise_exception(): [ 745.436523] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 745.436523] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] self.force_reraise() [ 745.436523] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 745.436523] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] raise self.value [ 745.436523] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in 
_update_ports_for_instance [ 745.436523] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] updated_port = self._update_port( [ 745.436523] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 745.436523] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] _ensure_no_port_binding_failure(port) [ 745.436523] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 745.436523] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] raise exception.PortBindingFailed(port_id=port['id']) [ 745.436523] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] nova.exception.PortBindingFailed: Binding failed for port b5180d58-fd4a-49e0-8719-ec41c4dd7ef5, please check neutron logs for more information. [ 745.436523] env[61629]: ERROR nova.compute.manager [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] [ 745.436917] env[61629]: DEBUG nova.compute.utils [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Binding failed for port b5180d58-fd4a-49e0-8719-ec41c4dd7ef5, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 745.436917] env[61629]: DEBUG oslo_concurrency.lockutils [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.509s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.438430] env[61629]: INFO nova.compute.claims [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 745.442973] env[61629]: DEBUG nova.compute.manager [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Build of instance 05b868fd-401e-48b7-928f-a39c002bbe71 was re-scheduled: Binding failed for port b5180d58-fd4a-49e0-8719-ec41c4dd7ef5, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 745.442973] env[61629]: DEBUG nova.compute.manager [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 745.443261] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Acquiring lock "refresh_cache-05b868fd-401e-48b7-928f-a39c002bbe71" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.443563] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Acquired lock "refresh_cache-05b868fd-401e-48b7-928f-a39c002bbe71" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.443874] env[61629]: DEBUG nova.network.neutron [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 745.767206] env[61629]: INFO nova.compute.manager [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] [instance: 39f7c5ee-7d07-4516-b008-40d5778cf139] Took 1.04 seconds to deallocate network for instance. [ 745.814357] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.927305] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquiring lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.927538] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.969977] env[61629]: DEBUG nova.network.neutron [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 746.227219] env[61629]: DEBUG nova.network.neutron [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.729509] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Releasing lock "refresh_cache-05b868fd-401e-48b7-928f-a39c002bbe71" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.730526] env[61629]: DEBUG nova.compute.manager [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 746.730526] env[61629]: DEBUG nova.compute.manager [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 746.730526] env[61629]: DEBUG nova.network.neutron [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 746.746774] env[61629]: DEBUG nova.network.neutron [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 746.774546] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-007491f9-b782-41d1-96d1-34804bd5a4ac {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.785680] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28bb3a91-791a-49d2-8254-5d8beaa942f1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.820404] env[61629]: INFO nova.scheduler.client.report [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Deleted allocations for instance 39f7c5ee-7d07-4516-b008-40d5778cf139 [ 746.826685] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd38ea6a-783e-4795-8233-a12632ef55ad {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.835928] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff16acc0-8589-490a-8383-82d158a359e9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.850129] env[61629]: DEBUG nova.compute.provider_tree [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.250209] env[61629]: DEBUG nova.network.neutron [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.331563] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4f5c3391-de89-47a4-b300-c795c077e729 tempest-ServersTestFqdnHostnames-539757656 tempest-ServersTestFqdnHostnames-539757656-project-member] Lock "39f7c5ee-7d07-4516-b008-40d5778cf139" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 154.044s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.353737] env[61629]: DEBUG nova.scheduler.client.report [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 747.754183] env[61629]: INFO nova.compute.manager [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 
tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] [instance: 05b868fd-401e-48b7-928f-a39c002bbe71] Took 1.02 seconds to deallocate network for instance. [ 747.833880] env[61629]: DEBUG nova.compute.manager [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 747.858531] env[61629]: DEBUG oslo_concurrency.lockutils [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.859068] env[61629]: DEBUG nova.compute.manager [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 747.865040] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.180s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.865611] env[61629]: INFO nova.compute.claims [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 748.359433] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.365094] env[61629]: DEBUG nova.compute.utils [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 748.366871] env[61629]: DEBUG nova.compute.manager [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 748.367117] env[61629]: DEBUG nova.network.neutron [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 748.415803] env[61629]: DEBUG nova.policy [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64ff84e38bb942bfa942de62353bd356', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b0b101e81dfe4c8b98314be278282c0d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 748.715958] env[61629]: DEBUG nova.network.neutron [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Successfully created port: 86fd11c4-67bd-4e1f-b34b-1b2c9c5a4bac {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 748.792107] env[61629]: INFO nova.scheduler.client.report [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Deleted allocations for instance 05b868fd-401e-48b7-928f-a39c002bbe71 [ 748.869750] env[61629]: DEBUG nova.compute.manager [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 749.262149] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b57f41-b42f-46a0-ae04-088c18a1bd92 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.269646] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe90d85c-a863-4b51-8b38-f380b00ad62c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.313401] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d71cfd33-11ca-4c0e-b625-c9284523c737 tempest-ServerActionsTestOtherA-855364020 tempest-ServerActionsTestOtherA-855364020-project-member] Lock "05b868fd-401e-48b7-928f-a39c002bbe71" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 153.969s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.315130] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e911164f-0e4a-4044-948b-b3d098c39dd4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.325696] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441514ec-2572-4809-b22b-9fd446a4f517 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.340474] env[61629]: DEBUG nova.compute.provider_tree [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 749.634660] env[61629]: DEBUG nova.compute.manager [req-334a8618-afab-4967-ae92-6523bd9e1254 req-cff21290-31f0-4030-89c3-8d631c4ba11f service nova] [instance: ad374170-21a1-4036-9804-b82493701abf] Received event network-changed-86fd11c4-67bd-4e1f-b34b-1b2c9c5a4bac {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 749.634914] env[61629]: DEBUG nova.compute.manager [req-334a8618-afab-4967-ae92-6523bd9e1254 req-cff21290-31f0-4030-89c3-8d631c4ba11f service nova] [instance: ad374170-21a1-4036-9804-b82493701abf] Refreshing instance network info cache due to event network-changed-86fd11c4-67bd-4e1f-b34b-1b2c9c5a4bac. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 749.635294] env[61629]: DEBUG oslo_concurrency.lockutils [req-334a8618-afab-4967-ae92-6523bd9e1254 req-cff21290-31f0-4030-89c3-8d631c4ba11f service nova] Acquiring lock "refresh_cache-ad374170-21a1-4036-9804-b82493701abf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.635476] env[61629]: DEBUG oslo_concurrency.lockutils [req-334a8618-afab-4967-ae92-6523bd9e1254 req-cff21290-31f0-4030-89c3-8d631c4ba11f service nova] Acquired lock "refresh_cache-ad374170-21a1-4036-9804-b82493701abf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.635708] env[61629]: DEBUG nova.network.neutron [req-334a8618-afab-4967-ae92-6523bd9e1254 req-cff21290-31f0-4030-89c3-8d631c4ba11f service nova] [instance: ad374170-21a1-4036-9804-b82493701abf] Refreshing network info cache for port 86fd11c4-67bd-4e1f-b34b-1b2c9c5a4bac {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 749.820173] env[61629]: DEBUG nova.compute.manager [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 749.845729] env[61629]: DEBUG nova.scheduler.client.report [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 749.883076] env[61629]: DEBUG nova.compute.manager [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 749.916447] env[61629]: DEBUG nova.virt.hardware [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 749.916700] env[61629]: DEBUG nova.virt.hardware [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 749.916854] env[61629]: DEBUG nova.virt.hardware [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 749.917069] env[61629]: DEBUG nova.virt.hardware [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 749.917209] env[61629]: DEBUG nova.virt.hardware [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 749.917363] env[61629]: DEBUG nova.virt.hardware [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 749.917635] env[61629]: DEBUG nova.virt.hardware [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 749.917811] env[61629]: DEBUG nova.virt.hardware [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 749.918027] env[61629]: DEBUG nova.virt.hardware [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 
tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 749.918151] env[61629]: DEBUG nova.virt.hardware [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 749.918337] env[61629]: DEBUG nova.virt.hardware [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 749.919286] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff1b9ce-b4ee-4ad5-8f43-b1556bb9a267 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.929775] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8c2202-1b8e-4692-a541-27f4e44ed6a8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.994346] env[61629]: ERROR nova.compute.manager [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 86fd11c4-67bd-4e1f-b34b-1b2c9c5a4bac, please check neutron logs for more information. 
[ 749.994346] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 749.994346] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 749.994346] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 749.994346] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 749.994346] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 749.994346] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 749.994346] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 749.994346] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 749.994346] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 749.994346] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 749.994346] env[61629]: ERROR nova.compute.manager raise self.value [ 749.994346] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 749.994346] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 749.994346] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 749.994346] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 749.994955] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 749.994955] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 749.994955] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 86fd11c4-67bd-4e1f-b34b-1b2c9c5a4bac, please check neutron logs for more information. 
[ 749.994955] env[61629]: ERROR nova.compute.manager [ 749.994955] env[61629]: Traceback (most recent call last): [ 749.994955] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 749.994955] env[61629]: listener.cb(fileno) [ 749.994955] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 749.994955] env[61629]: result = function(*args, **kwargs) [ 749.994955] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 749.994955] env[61629]: return func(*args, **kwargs) [ 749.994955] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 749.994955] env[61629]: raise e [ 749.994955] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 749.994955] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 749.994955] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 749.994955] env[61629]: created_port_ids = self._update_ports_for_instance( [ 749.994955] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 749.994955] env[61629]: with excutils.save_and_reraise_exception(): [ 749.994955] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 749.994955] env[61629]: self.force_reraise() [ 749.994955] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 749.994955] env[61629]: raise self.value [ 749.994955] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 749.994955] env[61629]: updated_port = self._update_port( [ 749.994955] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 749.994955] env[61629]: _ensure_no_port_binding_failure(port) [ 749.994955] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 749.994955] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 749.996309] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 86fd11c4-67bd-4e1f-b34b-1b2c9c5a4bac, please check neutron logs for more information. [ 749.996309] env[61629]: Removing descriptor: 21 [ 749.996309] env[61629]: ERROR nova.compute.manager [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 86fd11c4-67bd-4e1f-b34b-1b2c9c5a4bac, please check neutron logs for more information. 
[ 749.996309] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] Traceback (most recent call last): [ 749.996309] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 749.996309] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] yield resources [ 749.996309] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 749.996309] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] self.driver.spawn(context, instance, image_meta, [ 749.996309] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 749.996309] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 749.996309] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 749.996309] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] vm_ref = self.build_virtual_machine(instance, [ 749.997232] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 749.997232] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] vif_infos = vmwarevif.get_vif_info(self._session, [ 749.997232] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 749.997232] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] for vif in network_info: [ 749.997232] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 749.997232] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] return self._sync_wrapper(fn, *args, **kwargs) [ 749.997232] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 749.997232] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] self.wait() [ 749.997232] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 749.997232] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] self[:] = self._gt.wait() [ 749.997232] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 749.997232] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] return self._exit_event.wait() [ 749.997232] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 749.997853] env[61629]: ERROR 
nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] result = hub.switch() [ 749.997853] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 749.997853] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] return self.greenlet.switch() [ 749.997853] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 749.997853] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] result = function(*args, **kwargs) [ 749.997853] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 749.997853] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] return func(*args, **kwargs) [ 749.997853] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 749.997853] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] raise e [ 749.997853] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 749.997853] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] nwinfo = self.network_api.allocate_for_instance( [ 749.997853] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 749.997853] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] created_port_ids = self._update_ports_for_instance( [ 749.998726] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 749.998726] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] with excutils.save_and_reraise_exception(): [ 749.998726] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 749.998726] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] self.force_reraise() [ 749.998726] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 749.998726] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] raise self.value [ 749.998726] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 749.998726] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] updated_port = self._update_port( [ 749.998726] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 749.998726] 
env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] _ensure_no_port_binding_failure(port) [ 749.998726] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 749.998726] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] raise exception.PortBindingFailed(port_id=port['id']) [ 749.999432] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] nova.exception.PortBindingFailed: Binding failed for port 86fd11c4-67bd-4e1f-b34b-1b2c9c5a4bac, please check neutron logs for more information. [ 749.999432] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] [ 749.999432] env[61629]: INFO nova.compute.manager [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Terminating instance [ 749.999432] env[61629]: DEBUG oslo_concurrency.lockutils [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquiring lock "refresh_cache-ad374170-21a1-4036-9804-b82493701abf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.168530] env[61629]: DEBUG nova.network.neutron [req-334a8618-afab-4967-ae92-6523bd9e1254 req-cff21290-31f0-4030-89c3-8d631c4ba11f service nova] [instance: ad374170-21a1-4036-9804-b82493701abf] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.306078] env[61629]: DEBUG nova.network.neutron [req-334a8618-afab-4967-ae92-6523bd9e1254 req-cff21290-31f0-4030-89c3-8d631c4ba11f service nova] [instance: ad374170-21a1-4036-9804-b82493701abf] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.342095] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.352010] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.488s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.353394] env[61629]: DEBUG nova.compute.manager [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 750.355652] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.884s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.621780] env[61629]: DEBUG oslo_concurrency.lockutils [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Acquiring lock "edb4e0f6-57ad-48cf-aa20-3b2549bff3fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.623843] env[61629]: DEBUG oslo_concurrency.lockutils [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Lock "edb4e0f6-57ad-48cf-aa20-3b2549bff3fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.812153] env[61629]: DEBUG oslo_concurrency.lockutils [req-334a8618-afab-4967-ae92-6523bd9e1254 req-cff21290-31f0-4030-89c3-8d631c4ba11f service nova] Releasing lock "refresh_cache-ad374170-21a1-4036-9804-b82493701abf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.812591] env[61629]: DEBUG oslo_concurrency.lockutils [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquired lock "refresh_cache-ad374170-21a1-4036-9804-b82493701abf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.812778] env[61629]: DEBUG nova.network.neutron [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 750.860859] env[61629]: DEBUG nova.compute.utils [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 750.865675] env[61629]: DEBUG nova.compute.manager [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 750.865778] env[61629]: DEBUG nova.network.neutron [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 750.937880] env[61629]: DEBUG nova.policy [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2da5bc6775a24e2bbab129d727de895f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '27cc5f4c983a4a40aca3f207a6fed658', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 751.251711] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c08b14aa-6c51-4a2c-91da-c7483678c4f7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.259808] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a889831d-609c-4c46-8020-40614e43eabf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.295695] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6f3c79-8dbe-4567-9155-d8c361119694 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.304422] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a5bbb9c-756c-4f3f-b61b-d65a03c5e190 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.320903] env[61629]: DEBUG nova.compute.provider_tree [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 751.338034] env[61629]: DEBUG nova.network.neutron [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 751.366358] env[61629]: DEBUG nova.compute.manager [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 751.432555] env[61629]: DEBUG nova.network.neutron [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.504688] env[61629]: DEBUG nova.network.neutron [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Successfully created port: 12da692f-7fef-4759-89b1-a1e31061f346 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 751.661638] env[61629]: DEBUG nova.compute.manager [req-d4cd6857-2507-432e-8bf6-7440539a9632 req-1e43730f-182a-4b87-ac10-da59bef13334 service nova] [instance: ad374170-21a1-4036-9804-b82493701abf] Received event network-vif-deleted-86fd11c4-67bd-4e1f-b34b-1b2c9c5a4bac {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 751.826983] env[61629]: DEBUG nova.scheduler.client.report [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 751.937468] env[61629]: DEBUG oslo_concurrency.lockutils [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Releasing lock "refresh_cache-ad374170-21a1-4036-9804-b82493701abf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.937887] env[61629]: DEBUG nova.compute.manager [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 751.938286] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 751.938432] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ff9fa162-b293-45d0-bc7b-187b3fb2deb8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.948998] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f67edf-4ad9-4fc4-9faf-6b56fd169fd7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.972572] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ad374170-21a1-4036-9804-b82493701abf could not be found. [ 751.972855] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 751.972989] env[61629]: INFO nova.compute.manager [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Took 0.03 seconds to destroy the instance on the hypervisor. [ 751.973386] env[61629]: DEBUG oslo.service.loopingcall [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 751.973627] env[61629]: DEBUG nova.compute.manager [-] [instance: ad374170-21a1-4036-9804-b82493701abf] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 751.973729] env[61629]: DEBUG nova.network.neutron [-] [instance: ad374170-21a1-4036-9804-b82493701abf] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 751.999837] env[61629]: DEBUG nova.network.neutron [-] [instance: ad374170-21a1-4036-9804-b82493701abf] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 752.333464] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.978s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.334121] env[61629]: ERROR nova.compute.manager [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c24f97be-7a5e-4797-a8f6-c9cab32596b2, please check neutron logs for more information. [ 752.334121] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Traceback (most recent call last): [ 752.334121] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 752.334121] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] self.driver.spawn(context, instance, image_meta, [ 752.334121] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 752.334121] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] self._vmops.spawn(context, instance, image_meta, injected_files, [ 752.334121] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 752.334121] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] vm_ref = self.build_virtual_machine(instance, [ 752.334121] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 752.334121] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] vif_infos = vmwarevif.get_vif_info(self._session, [ 752.334121] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 752.334560] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] for vif in network_info: [ 752.334560] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 752.334560] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] return self._sync_wrapper(fn, *args, **kwargs) [ 752.334560] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 752.334560] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] self.wait() [ 752.334560] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 752.334560] env[61629]: ERROR 
nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] self[:] = self._gt.wait() [ 752.334560] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 752.334560] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] return self._exit_event.wait() [ 752.334560] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 752.334560] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] current.throw(*self._exc) [ 752.334560] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 752.334560] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] result = function(*args, **kwargs) [ 752.334977] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 752.334977] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] return func(*args, **kwargs) [ 752.334977] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 752.334977] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] raise e [ 752.334977] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 752.334977] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] nwinfo = self.network_api.allocate_for_instance( [ 752.334977] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 752.334977] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] created_port_ids = self._update_ports_for_instance( [ 752.334977] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 752.334977] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] with excutils.save_and_reraise_exception(): [ 752.334977] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 752.334977] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] self.force_reraise() [ 752.334977] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 752.335414] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] raise self.value [ 752.335414] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in 
_update_ports_for_instance [ 752.335414] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] updated_port = self._update_port( [ 752.335414] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 752.335414] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] _ensure_no_port_binding_failure(port) [ 752.335414] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 752.335414] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] raise exception.PortBindingFailed(port_id=port['id']) [ 752.335414] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] nova.exception.PortBindingFailed: Binding failed for port c24f97be-7a5e-4797-a8f6-c9cab32596b2, please check neutron logs for more information. [ 752.335414] env[61629]: ERROR nova.compute.manager [instance: 3a804973-af62-4de1-a4ee-5943209c5884] [ 752.335414] env[61629]: DEBUG nova.compute.utils [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Binding failed for port c24f97be-7a5e-4797-a8f6-c9cab32596b2, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 752.336063] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.209s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.337566] env[61629]: INFO nova.compute.claims [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 752.340493] env[61629]: DEBUG nova.compute.manager [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Build of instance 3a804973-af62-4de1-a4ee-5943209c5884 was re-scheduled: Binding failed for port c24f97be-7a5e-4797-a8f6-c9cab32596b2, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 752.340926] env[61629]: DEBUG nova.compute.manager [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 752.341207] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Acquiring lock "refresh_cache-3a804973-af62-4de1-a4ee-5943209c5884" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 752.341388] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Acquired lock "refresh_cache-3a804973-af62-4de1-a4ee-5943209c5884" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.341575] env[61629]: DEBUG nova.network.neutron [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 752.380140] env[61629]: DEBUG nova.compute.manager [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 752.417307] env[61629]: DEBUG nova.virt.hardware [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 752.417542] env[61629]: DEBUG nova.virt.hardware [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 752.417700] env[61629]: DEBUG nova.virt.hardware [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 752.417879] env[61629]: DEBUG nova.virt.hardware [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 752.418197] env[61629]: DEBUG nova.virt.hardware [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 752.418826] env[61629]: DEBUG nova.virt.hardware [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 752.418826] env[61629]: DEBUG nova.virt.hardware [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 752.418826] env[61629]: DEBUG nova.virt.hardware [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 752.418945] env[61629]: DEBUG nova.virt.hardware [None 
req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 752.419527] env[61629]: DEBUG nova.virt.hardware [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 752.419527] env[61629]: DEBUG nova.virt.hardware [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 752.420402] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b4b1fe6-2208-41e2-be56-2dcdbb0717d4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.433090] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43c1a17-55a6-4e9d-9277-b98f26dcfa04 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.503128] env[61629]: DEBUG nova.network.neutron [-] [instance: ad374170-21a1-4036-9804-b82493701abf] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.818290] env[61629]: ERROR nova.compute.manager [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 12da692f-7fef-4759-89b1-a1e31061f346, please check neutron logs for more information. 
[ 752.818290] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 752.818290] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 752.818290] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 752.818290] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 752.818290] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 752.818290] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 752.818290] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 752.818290] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 752.818290] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 752.818290] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 752.818290] env[61629]: ERROR nova.compute.manager raise self.value [ 752.818290] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 752.818290] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 752.818290] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 752.818290] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 752.819064] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 752.819064] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 752.819064] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 12da692f-7fef-4759-89b1-a1e31061f346, please check neutron logs for more information. 
[ 752.819064] env[61629]: ERROR nova.compute.manager [ 752.819064] env[61629]: Traceback (most recent call last): [ 752.819064] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 752.819064] env[61629]: listener.cb(fileno) [ 752.819064] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 752.819064] env[61629]: result = function(*args, **kwargs) [ 752.819064] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 752.819064] env[61629]: return func(*args, **kwargs) [ 752.819064] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 752.819064] env[61629]: raise e [ 752.819064] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 752.819064] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 752.819064] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 752.819064] env[61629]: created_port_ids = self._update_ports_for_instance( [ 752.819064] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 752.819064] env[61629]: with excutils.save_and_reraise_exception(): [ 752.819064] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 752.819064] env[61629]: self.force_reraise() [ 752.819064] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 752.819064] env[61629]: raise self.value [ 752.819064] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 752.819064] env[61629]: updated_port = self._update_port( [ 752.819064] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 752.819064] env[61629]: _ensure_no_port_binding_failure(port) [ 752.819064] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 752.819064] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 752.819874] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 12da692f-7fef-4759-89b1-a1e31061f346, please check neutron logs for more information. [ 752.819874] env[61629]: Removing descriptor: 21 [ 752.819874] env[61629]: ERROR nova.compute.manager [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 12da692f-7fef-4759-89b1-a1e31061f346, please check neutron logs for more information. 
[ 752.819874] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Traceback (most recent call last): [ 752.819874] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 752.819874] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] yield resources [ 752.819874] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 752.819874] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] self.driver.spawn(context, instance, image_meta, [ 752.819874] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 752.819874] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 752.819874] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 752.819874] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] vm_ref = self.build_virtual_machine(instance, [ 752.820229] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 752.820229] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] vif_infos = vmwarevif.get_vif_info(self._session, [ 752.820229] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 752.820229] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] for vif in network_info: [ 752.820229] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 752.820229] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] return self._sync_wrapper(fn, *args, **kwargs) [ 752.820229] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 752.820229] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] self.wait() [ 752.820229] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 752.820229] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] self[:] = self._gt.wait() [ 752.820229] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 752.820229] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] return self._exit_event.wait() [ 752.820229] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 752.820628] env[61629]: ERROR 
nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] result = hub.switch() [ 752.820628] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 752.820628] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] return self.greenlet.switch() [ 752.820628] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 752.820628] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] result = function(*args, **kwargs) [ 752.820628] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 752.820628] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] return func(*args, **kwargs) [ 752.820628] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 752.820628] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] raise e [ 752.820628] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 752.820628] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] nwinfo = self.network_api.allocate_for_instance( [ 752.820628] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 752.820628] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] created_port_ids = self._update_ports_for_instance( [ 752.821202] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 752.821202] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] with excutils.save_and_reraise_exception(): [ 752.821202] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 752.821202] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] self.force_reraise() [ 752.821202] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 752.821202] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] raise self.value [ 752.821202] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 752.821202] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] updated_port = self._update_port( [ 752.821202] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 752.821202] 
env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] _ensure_no_port_binding_failure(port) [ 752.821202] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 752.821202] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] raise exception.PortBindingFailed(port_id=port['id']) [ 752.821573] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] nova.exception.PortBindingFailed: Binding failed for port 12da692f-7fef-4759-89b1-a1e31061f346, please check neutron logs for more information. [ 752.821573] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] [ 752.821573] env[61629]: INFO nova.compute.manager [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Terminating instance [ 752.822319] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Acquiring lock "refresh_cache-079cb97b-b7d4-4f25-9f1d-f77f34a2efbe" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 752.822496] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Acquired lock "refresh_cache-079cb97b-b7d4-4f25-9f1d-f77f34a2efbe" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.823144] env[61629]: DEBUG nova.network.neutron [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 752.871616] env[61629]: DEBUG nova.network.neutron [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 752.966830] env[61629]: DEBUG nova.network.neutron [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.005815] env[61629]: INFO nova.compute.manager [-] [instance: ad374170-21a1-4036-9804-b82493701abf] Took 1.03 seconds to deallocate network for instance. 
[ 753.008047] env[61629]: DEBUG nova.compute.claims [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 753.008231] env[61629]: DEBUG oslo_concurrency.lockutils [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.344634] env[61629]: DEBUG nova.network.neutron [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 753.438366] env[61629]: DEBUG nova.network.neutron [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.471690] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Releasing lock "refresh_cache-3a804973-af62-4de1-a4ee-5943209c5884" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.471931] env[61629]: DEBUG nova.compute.manager [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 753.472128] env[61629]: DEBUG nova.compute.manager [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 753.472295] env[61629]: DEBUG nova.network.neutron [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 753.489166] env[61629]: DEBUG nova.network.neutron [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 753.612841] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cdfd034-9a7a-4805-86b7-7247f06fb141 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.620769] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-842af175-fd4b-4f05-ab91-0effe16563ed {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.651382] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ee19ae3-8e48-467d-9fca-f37bd123599f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.658480] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36550a8e-7df9-407c-92fb-54a045da0db5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.673831] env[61629]: DEBUG nova.compute.provider_tree [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 753.705090] env[61629]: DEBUG nova.compute.manager [req-c30e7e89-f7c2-46f2-8415-e886ae48e0dd req-26d4a4dc-911f-4d3e-a673-b5f87aff6446 service nova] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Received event network-changed-12da692f-7fef-4759-89b1-a1e31061f346 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 753.705297] env[61629]: DEBUG nova.compute.manager [req-c30e7e89-f7c2-46f2-8415-e886ae48e0dd req-26d4a4dc-911f-4d3e-a673-b5f87aff6446 service nova] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Refreshing instance network info cache due to event network-changed-12da692f-7fef-4759-89b1-a1e31061f346. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 753.705495] env[61629]: DEBUG oslo_concurrency.lockutils [req-c30e7e89-f7c2-46f2-8415-e886ae48e0dd req-26d4a4dc-911f-4d3e-a673-b5f87aff6446 service nova] Acquiring lock "refresh_cache-079cb97b-b7d4-4f25-9f1d-f77f34a2efbe" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.941166] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Releasing lock "refresh_cache-079cb97b-b7d4-4f25-9f1d-f77f34a2efbe" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.941615] env[61629]: DEBUG nova.compute.manager [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 753.941810] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 753.942139] env[61629]: DEBUG oslo_concurrency.lockutils [req-c30e7e89-f7c2-46f2-8415-e886ae48e0dd req-26d4a4dc-911f-4d3e-a673-b5f87aff6446 service nova] Acquired lock "refresh_cache-079cb97b-b7d4-4f25-9f1d-f77f34a2efbe" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.942311] env[61629]: DEBUG nova.network.neutron [req-c30e7e89-f7c2-46f2-8415-e886ae48e0dd req-26d4a4dc-911f-4d3e-a673-b5f87aff6446 service nova] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Refreshing network info cache for port 12da692f-7fef-4759-89b1-a1e31061f346 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 753.943548] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-65f18314-6761-471a-b8ad-3f67f612d173 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.953979] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5136d2d1-5159-43f0-92b5-e90fe6e3f3b1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.976613] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe could not be found. [ 753.976909] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 753.976964] env[61629]: INFO nova.compute.manager [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Took 0.04 seconds to destroy the instance on the hypervisor. [ 753.977213] env[61629]: DEBUG oslo.service.loopingcall [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 753.977434] env[61629]: DEBUG nova.compute.manager [-] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 753.977534] env[61629]: DEBUG nova.network.neutron [-] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 753.992189] env[61629]: DEBUG nova.network.neutron [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.998848] env[61629]: DEBUG nova.network.neutron [-] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 754.180201] env[61629]: DEBUG nova.scheduler.client.report [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 754.462500] env[61629]: DEBUG nova.network.neutron [req-c30e7e89-f7c2-46f2-8415-e886ae48e0dd req-26d4a4dc-911f-4d3e-a673-b5f87aff6446 service nova] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 754.495440] env[61629]: INFO nova.compute.manager [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: 3a804973-af62-4de1-a4ee-5943209c5884] Took 1.02 seconds to deallocate network for instance. 
[ 754.502120] env[61629]: DEBUG nova.network.neutron [-] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.561723] env[61629]: DEBUG nova.network.neutron [req-c30e7e89-f7c2-46f2-8415-e886ae48e0dd req-26d4a4dc-911f-4d3e-a673-b5f87aff6446 service nova] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.685173] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.349s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.685809] env[61629]: DEBUG nova.compute.manager [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 754.688541] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.649s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.689951] env[61629]: INFO nova.compute.claims [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 754.720980] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 754.721228] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.005373] env[61629]: INFO nova.compute.manager [-] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Took 1.03 seconds to deallocate network for instance. 
[ 755.007228] env[61629]: DEBUG nova.compute.claims [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 755.007422] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.065506] env[61629]: DEBUG oslo_concurrency.lockutils [req-c30e7e89-f7c2-46f2-8415-e886ae48e0dd req-26d4a4dc-911f-4d3e-a673-b5f87aff6446 service nova] Releasing lock "refresh_cache-079cb97b-b7d4-4f25-9f1d-f77f34a2efbe" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 755.065709] env[61629]: DEBUG nova.compute.manager [req-c30e7e89-f7c2-46f2-8415-e886ae48e0dd req-26d4a4dc-911f-4d3e-a673-b5f87aff6446 service nova] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Received event network-vif-deleted-12da692f-7fef-4759-89b1-a1e31061f346 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 755.194935] env[61629]: DEBUG nova.compute.utils [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 755.198304] env[61629]: DEBUG nova.compute.manager [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 755.198304] env[61629]: DEBUG nova.network.neutron [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 755.227052] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.227189] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Starting heal instance info cache {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 755.227309] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Rebuilding the list of instances to heal {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 755.237010] env[61629]: DEBUG nova.policy [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38cc8b6343d54d30a3f6f13512d23020', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e7fced3a50d4821b42cf087d8111cb7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 755.516181] env[61629]: DEBUG nova.network.neutron [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Successfully created port: e9d30088-009c-4567-a13b-b3bc5766fc05 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 755.528762] env[61629]: INFO nova.scheduler.client.report [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Deleted allocations for instance 3a804973-af62-4de1-a4ee-5943209c5884 [ 755.698621] env[61629]: DEBUG nova.compute.manager [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 755.736685] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Skipping network cache update for instance because it is Building. {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 755.736685] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: ad374170-21a1-4036-9804-b82493701abf] Skipping network cache update for instance because it is Building. 
{{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 755.736685] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Skipping network cache update for instance because it is Building. {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 755.736685] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Skipping network cache update for instance because it is Building. {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 755.736685] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Skipping network cache update for instance because it is Building. {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 755.736685] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Didn't find any instances for network info cache update. {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 755.737031] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.737031] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.737031] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.737031] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.737031] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.737031] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.737227] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61629) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 755.737227] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager.update_available_resource {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 756.023970] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94356527-d290-42d9-b2bb-bc26e55015ec {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.032296] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc59d17b-cd6f-4dc9-84fb-c42efc64925a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.061689] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c3e7675e-0978-4f96-81d9-e456973d5bbd tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Lock "3a804973-af62-4de1-a4ee-5943209c5884" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 158.784s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.063546] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a47ffd98-d320-4445-b7ca-c371a37e183e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.071406] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac5927cd-700c-4cb0-a55b-bf128f338d71 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.085863] env[61629]: DEBUG nova.compute.provider_tree [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 756.242381] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.413111] env[61629]: DEBUG nova.compute.manager [req-14b4b0f5-ef9c-450e-bdfd-d1dc07c462e7 req-f2af32e4-983c-4882-9977-09a3f833172f service nova] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Received event network-changed-e9d30088-009c-4567-a13b-b3bc5766fc05 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 756.413323] env[61629]: DEBUG nova.compute.manager [req-14b4b0f5-ef9c-450e-bdfd-d1dc07c462e7 req-f2af32e4-983c-4882-9977-09a3f833172f service nova] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Refreshing instance network info cache due to event network-changed-e9d30088-009c-4567-a13b-b3bc5766fc05. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 756.413562] env[61629]: DEBUG oslo_concurrency.lockutils [req-14b4b0f5-ef9c-450e-bdfd-d1dc07c462e7 req-f2af32e4-983c-4882-9977-09a3f833172f service nova] Acquiring lock "refresh_cache-443ad254-3d5d-4fb8-a565-ce70c352e3f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.413709] env[61629]: DEBUG oslo_concurrency.lockutils [req-14b4b0f5-ef9c-450e-bdfd-d1dc07c462e7 req-f2af32e4-983c-4882-9977-09a3f833172f service nova] Acquired lock "refresh_cache-443ad254-3d5d-4fb8-a565-ce70c352e3f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.413869] env[61629]: DEBUG nova.network.neutron [req-14b4b0f5-ef9c-450e-bdfd-d1dc07c462e7 req-f2af32e4-983c-4882-9977-09a3f833172f service nova] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Refreshing network info cache for port e9d30088-009c-4567-a13b-b3bc5766fc05 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 756.567812] env[61629]: DEBUG nova.compute.manager [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 756.589065] env[61629]: DEBUG nova.scheduler.client.report [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 756.611307] env[61629]: ERROR nova.compute.manager [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e9d30088-009c-4567-a13b-b3bc5766fc05, please check neutron logs for more information. 
[ 756.611307] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 756.611307] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 756.611307] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 756.611307] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 756.611307] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 756.611307] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 756.611307] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 756.611307] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 756.611307] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 756.611307] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 756.611307] env[61629]: ERROR nova.compute.manager raise self.value [ 756.611307] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 756.611307] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 756.611307] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 756.611307] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 756.611787] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 756.611787] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 756.611787] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e9d30088-009c-4567-a13b-b3bc5766fc05, please check neutron logs for more information. 
[ 756.611787] env[61629]: ERROR nova.compute.manager [ 756.611787] env[61629]: Traceback (most recent call last): [ 756.611787] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 756.611787] env[61629]: listener.cb(fileno) [ 756.611787] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 756.611787] env[61629]: result = function(*args, **kwargs) [ 756.611787] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 756.611787] env[61629]: return func(*args, **kwargs) [ 756.611787] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 756.611787] env[61629]: raise e [ 756.611787] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 756.611787] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 756.611787] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 756.611787] env[61629]: created_port_ids = self._update_ports_for_instance( [ 756.611787] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 756.611787] env[61629]: with excutils.save_and_reraise_exception(): [ 756.611787] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 756.611787] env[61629]: self.force_reraise() [ 756.611787] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 756.611787] env[61629]: raise self.value [ 756.611787] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 756.611787] env[61629]: updated_port = self._update_port( [ 756.611787] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 756.611787] env[61629]: _ensure_no_port_binding_failure(port) [ 756.611787] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 756.611787] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 756.612734] env[61629]: nova.exception.PortBindingFailed: Binding failed for port e9d30088-009c-4567-a13b-b3bc5766fc05, please check neutron logs for more information. [ 756.612734] env[61629]: Removing descriptor: 21 [ 756.714529] env[61629]: DEBUG nova.compute.manager [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 756.734514] env[61629]: DEBUG nova.virt.hardware [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 756.734756] env[61629]: DEBUG nova.virt.hardware [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 756.734909] env[61629]: DEBUG nova.virt.hardware [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 756.735096] env[61629]: DEBUG nova.virt.hardware [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 756.735250] env[61629]: DEBUG nova.virt.hardware [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 756.735395] env[61629]: DEBUG nova.virt.hardware [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 756.735603] env[61629]: DEBUG nova.virt.hardware [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 756.735759] env[61629]: DEBUG nova.virt.hardware [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 756.735921] env[61629]: DEBUG 
nova.virt.hardware [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 756.736093] env[61629]: DEBUG nova.virt.hardware [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 756.736269] env[61629]: DEBUG nova.virt.hardware [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 756.737142] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b61f1ff-0d25-4b7a-883a-fc1701581d84 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.745398] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d7111a-3295-4b28-be72-5775978ddde3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.759012] env[61629]: ERROR nova.compute.manager [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e9d30088-009c-4567-a13b-b3bc5766fc05, please check neutron logs for more information. 
[ 756.759012] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Traceback (most recent call last): [ 756.759012] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 756.759012] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] yield resources [ 756.759012] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 756.759012] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] self.driver.spawn(context, instance, image_meta, [ 756.759012] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 756.759012] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 756.759012] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 756.759012] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] vm_ref = self.build_virtual_machine(instance, [ 756.759012] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 756.759428] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] vif_infos = vmwarevif.get_vif_info(self._session, [ 756.759428] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 756.759428] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] for vif in network_info: [ 756.759428] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 756.759428] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] return self._sync_wrapper(fn, *args, **kwargs) [ 756.759428] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 756.759428] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] self.wait() [ 756.759428] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 756.759428] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] self[:] = self._gt.wait() [ 756.759428] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 756.759428] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] return self._exit_event.wait() [ 756.759428] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 756.759428] env[61629]: ERROR 
nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] current.throw(*self._exc) [ 756.759863] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 756.759863] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] result = function(*args, **kwargs) [ 756.759863] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 756.759863] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] return func(*args, **kwargs) [ 756.759863] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 756.759863] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] raise e [ 756.759863] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 756.759863] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] nwinfo = self.network_api.allocate_for_instance( [ 756.759863] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 756.759863] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] created_port_ids = self._update_ports_for_instance( [ 756.759863] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 756.759863] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] with excutils.save_and_reraise_exception(): [ 756.759863] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 756.760440] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] self.force_reraise() [ 756.760440] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 756.760440] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] raise self.value [ 756.760440] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 756.760440] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] updated_port = self._update_port( [ 756.760440] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 756.760440] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] _ensure_no_port_binding_failure(port) [ 756.760440] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
756.760440] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] raise exception.PortBindingFailed(port_id=port['id']) [ 756.760440] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] nova.exception.PortBindingFailed: Binding failed for port e9d30088-009c-4567-a13b-b3bc5766fc05, please check neutron logs for more information. [ 756.760440] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] [ 756.760440] env[61629]: INFO nova.compute.manager [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Terminating instance [ 756.761552] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "refresh_cache-443ad254-3d5d-4fb8-a565-ce70c352e3f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.935380] env[61629]: DEBUG nova.network.neutron [req-14b4b0f5-ef9c-450e-bdfd-d1dc07c462e7 req-f2af32e4-983c-4882-9977-09a3f833172f service nova] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 757.035029] env[61629]: DEBUG nova.network.neutron [req-14b4b0f5-ef9c-450e-bdfd-d1dc07c462e7 req-f2af32e4-983c-4882-9977-09a3f833172f service nova] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.064845] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Acquiring lock "c3f830d6-8999-49d5-a431-b09dfdaf8313" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.065097] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Lock "c3f830d6-8999-49d5-a431-b09dfdaf8313" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.092042] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.093910] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.405s {{(pid=61629) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.094409] env[61629]: DEBUG nova.compute.manager [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 757.097014] env[61629]: DEBUG oslo_concurrency.lockutils [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.735s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.537935] env[61629]: DEBUG oslo_concurrency.lockutils [req-14b4b0f5-ef9c-450e-bdfd-d1dc07c462e7 req-f2af32e4-983c-4882-9977-09a3f833172f service nova] Releasing lock "refresh_cache-443ad254-3d5d-4fb8-a565-ce70c352e3f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.538410] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquired lock "refresh_cache-443ad254-3d5d-4fb8-a565-ce70c352e3f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.538598] env[61629]: DEBUG nova.network.neutron [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 757.601487] env[61629]: DEBUG nova.compute.utils [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 757.605670] env[61629]: DEBUG nova.compute.manager [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 757.605783] env[61629]: DEBUG nova.network.neutron [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 757.656812] env[61629]: DEBUG nova.policy [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ff5dd6ffbf5452e8f56a1f64ae175b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67e38fd8e30349c6857025719fd26211', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 757.908535] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f0ed69-b12f-44dd-94ba-5a990f25aede {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.916660] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9cebf71-7900-4d59-bb37-07777e7debae {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.945698] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8637da9-419f-453f-adca-2385a1c35e41 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.952862] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d970221-6dde-4530-9ae6-c5994607c676 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.966620] env[61629]: DEBUG nova.compute.provider_tree [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 758.056996] env[61629]: DEBUG nova.network.neutron [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 758.061598] env[61629]: DEBUG nova.network.neutron [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Successfully created port: 35513a37-8c02-451a-85d7-dc0055751458 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 758.108751] env[61629]: DEBUG nova.compute.manager [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 758.159904] env[61629]: DEBUG nova.network.neutron [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.439119] env[61629]: DEBUG nova.compute.manager [req-5351ca06-d1c0-4f1b-9aa0-005c39749666 req-5b091a15-68b0-4c88-91be-95954c36cf0a service nova] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Received event network-vif-deleted-e9d30088-009c-4567-a13b-b3bc5766fc05 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 758.470637] env[61629]: DEBUG nova.scheduler.client.report [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 758.662222] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Releasing lock "refresh_cache-443ad254-3d5d-4fb8-a565-ce70c352e3f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.662656] env[61629]: DEBUG nova.compute.manager [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 758.662849] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 758.663142] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5e0a62d6-a3a8-40c4-abeb-88173257d24e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.673201] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d2c578-2e4c-45df-a825-51aa3302ea28 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.694404] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 443ad254-3d5d-4fb8-a565-ce70c352e3f2 could not be found. [ 758.694629] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 758.694810] env[61629]: INFO nova.compute.manager [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Took 0.03 seconds to destroy the instance on the hypervisor. [ 758.695060] env[61629]: DEBUG oslo.service.loopingcall [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 758.695276] env[61629]: DEBUG nova.compute.manager [-] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 758.695370] env[61629]: DEBUG nova.network.neutron [-] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 758.715681] env[61629]: DEBUG nova.network.neutron [-] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 758.975439] env[61629]: DEBUG oslo_concurrency.lockutils [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.878s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.976012] env[61629]: ERROR nova.compute.manager [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 804a1b0f-457e-4300-b463-8dc58d2b784f, please check neutron logs for more information. [ 758.976012] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Traceback (most recent call last): [ 758.976012] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 758.976012] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] self.driver.spawn(context, instance, image_meta, [ 758.976012] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 758.976012] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 758.976012] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 758.976012] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] vm_ref = self.build_virtual_machine(instance, [ 758.976012] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 758.976012] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] vif_infos = vmwarevif.get_vif_info(self._session, [ 758.976012] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 758.976396] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] for vif in network_info: [ 758.976396] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 758.976396] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] return self._sync_wrapper(fn, *args, **kwargs) [ 758.976396] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 758.976396] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] self.wait() [ 758.976396] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 758.976396] env[61629]: ERROR 
nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] self[:] = self._gt.wait() [ 758.976396] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 758.976396] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] return self._exit_event.wait() [ 758.976396] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 758.976396] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] current.throw(*self._exc) [ 758.976396] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 758.976396] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] result = function(*args, **kwargs) [ 758.976820] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 758.976820] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] return func(*args, **kwargs) [ 758.976820] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 758.976820] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] raise e [ 758.976820] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 758.976820] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] nwinfo = self.network_api.allocate_for_instance( [ 758.976820] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 758.976820] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] created_port_ids = self._update_ports_for_instance( [ 758.976820] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 758.976820] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] with excutils.save_and_reraise_exception(): [ 758.976820] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 758.976820] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] self.force_reraise() [ 758.976820] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 758.977339] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] raise self.value [ 758.977339] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in 
_update_ports_for_instance [ 758.977339] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] updated_port = self._update_port( [ 758.977339] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 758.977339] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] _ensure_no_port_binding_failure(port) [ 758.977339] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 758.977339] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] raise exception.PortBindingFailed(port_id=port['id']) [ 758.977339] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] nova.exception.PortBindingFailed: Binding failed for port 804a1b0f-457e-4300-b463-8dc58d2b784f, please check neutron logs for more information. [ 758.977339] env[61629]: ERROR nova.compute.manager [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] [ 758.977339] env[61629]: DEBUG nova.compute.utils [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Binding failed for port 804a1b0f-457e-4300-b463-8dc58d2b784f, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 758.977905] env[61629]: DEBUG oslo_concurrency.lockutils [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.577s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.979347] env[61629]: INFO nova.compute.claims [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 758.981968] env[61629]: DEBUG nova.compute.manager [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Build of instance 012e6d9c-0f02-4761-9639-9a8e8972ea2b was re-scheduled: Binding failed for port 804a1b0f-457e-4300-b463-8dc58d2b784f, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 758.983176] env[61629]: DEBUG nova.compute.manager [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 758.983176] env[61629]: DEBUG oslo_concurrency.lockutils [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Acquiring lock "refresh_cache-012e6d9c-0f02-4761-9639-9a8e8972ea2b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.983176] env[61629]: DEBUG oslo_concurrency.lockutils [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Acquired lock "refresh_cache-012e6d9c-0f02-4761-9639-9a8e8972ea2b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.983176] env[61629]: DEBUG nova.network.neutron [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 758.988730] env[61629]: ERROR nova.compute.manager [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 35513a37-8c02-451a-85d7-dc0055751458, please check neutron logs for more information. 
[ 758.988730] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 758.988730] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 758.988730] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 758.988730] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 758.988730] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 758.988730] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 758.988730] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 758.988730] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 758.988730] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 758.988730] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 758.988730] env[61629]: ERROR nova.compute.manager raise self.value [ 758.988730] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 758.988730] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 758.988730] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 758.988730] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 758.989315] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 758.989315] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 758.989315] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 35513a37-8c02-451a-85d7-dc0055751458, please check neutron logs for more information. 
[ 758.989315] env[61629]: ERROR nova.compute.manager [ 758.989315] env[61629]: Traceback (most recent call last): [ 758.989315] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 758.989315] env[61629]: listener.cb(fileno) [ 758.989315] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 758.989315] env[61629]: result = function(*args, **kwargs) [ 758.989315] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 758.989315] env[61629]: return func(*args, **kwargs) [ 758.989315] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 758.989315] env[61629]: raise e [ 758.989315] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 758.989315] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 758.989315] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 758.989315] env[61629]: created_port_ids = self._update_ports_for_instance( [ 758.989315] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 758.989315] env[61629]: with excutils.save_and_reraise_exception(): [ 758.989315] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 758.989315] env[61629]: self.force_reraise() [ 758.989315] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 758.989315] env[61629]: raise self.value [ 758.989315] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 758.989315] env[61629]: updated_port = self._update_port( [ 758.989315] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 758.989315] env[61629]: _ensure_no_port_binding_failure(port) [ 758.989315] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 758.989315] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 758.990263] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 35513a37-8c02-451a-85d7-dc0055751458, please check neutron logs for more information. [ 758.990263] env[61629]: Removing descriptor: 21 [ 759.119539] env[61629]: DEBUG nova.compute.manager [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 759.144971] env[61629]: DEBUG nova.virt.hardware [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 759.145238] env[61629]: DEBUG nova.virt.hardware [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 759.145397] env[61629]: DEBUG nova.virt.hardware [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 759.145576] env[61629]: DEBUG nova.virt.hardware [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 759.145740] env[61629]: DEBUG nova.virt.hardware [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 759.145965] env[61629]: DEBUG nova.virt.hardware [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 759.146271] env[61629]: DEBUG nova.virt.hardware [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 759.146503] env[61629]: DEBUG nova.virt.hardware [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
759.146742] env[61629]: DEBUG nova.virt.hardware [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 759.146992] env[61629]: DEBUG nova.virt.hardware [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 759.147273] env[61629]: DEBUG nova.virt.hardware [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 759.148559] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96416bce-5470-41cf-9943-a3ab6c4a73a6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.156097] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ee0685-7642-4862-9869-00ddda23b3fa {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.169513] env[61629]: ERROR nova.compute.manager [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 35513a37-8c02-451a-85d7-dc0055751458, please check neutron logs for more information. 
[ 759.169513] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Traceback (most recent call last): [ 759.169513] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 759.169513] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] yield resources [ 759.169513] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 759.169513] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] self.driver.spawn(context, instance, image_meta, [ 759.169513] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 759.169513] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] self._vmops.spawn(context, instance, image_meta, injected_files, [ 759.169513] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 759.169513] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] vm_ref = self.build_virtual_machine(instance, [ 759.169513] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 759.169919] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] vif_infos = vmwarevif.get_vif_info(self._session, [ 759.169919] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 759.169919] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] for vif in network_info: [ 759.169919] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 759.169919] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] return self._sync_wrapper(fn, *args, **kwargs) [ 759.169919] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 759.169919] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] self.wait() [ 759.169919] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 759.169919] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] self[:] = self._gt.wait() [ 759.169919] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 759.169919] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] return self._exit_event.wait() [ 759.169919] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 759.169919] env[61629]: ERROR 
nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] current.throw(*self._exc) [ 759.170310] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 759.170310] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] result = function(*args, **kwargs) [ 759.170310] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 759.170310] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] return func(*args, **kwargs) [ 759.170310] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 759.170310] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] raise e [ 759.170310] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 759.170310] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] nwinfo = self.network_api.allocate_for_instance( [ 759.170310] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 759.170310] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] created_port_ids = self._update_ports_for_instance( [ 759.170310] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 759.170310] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] with excutils.save_and_reraise_exception(): [ 759.170310] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 759.170752] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] self.force_reraise() [ 759.170752] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 759.170752] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] raise self.value [ 759.170752] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 759.170752] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] updated_port = self._update_port( [ 759.170752] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 759.170752] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] _ensure_no_port_binding_failure(port) [ 759.170752] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
759.170752] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] raise exception.PortBindingFailed(port_id=port['id']) [ 759.170752] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] nova.exception.PortBindingFailed: Binding failed for port 35513a37-8c02-451a-85d7-dc0055751458, please check neutron logs for more information. [ 759.170752] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] [ 759.170752] env[61629]: INFO nova.compute.manager [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Terminating instance [ 759.171704] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Acquiring lock "refresh_cache-d013c1e1-952a-4b76-a44d-8499f5159c42" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 759.171863] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Acquired lock "refresh_cache-d013c1e1-952a-4b76-a44d-8499f5159c42" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.172046] env[61629]: DEBUG nova.network.neutron [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 759.218025] env[61629]: DEBUG nova.network.neutron [-] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.501142] env[61629]: DEBUG nova.network.neutron [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 759.579645] env[61629]: DEBUG nova.network.neutron [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.688639] env[61629]: DEBUG nova.network.neutron [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 759.721115] env[61629]: INFO nova.compute.manager [-] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Took 1.02 seconds to deallocate network for instance. 
[ 759.722687] env[61629]: DEBUG nova.compute.claims [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 759.722879] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.744428] env[61629]: DEBUG nova.network.neutron [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.081825] env[61629]: DEBUG oslo_concurrency.lockutils [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Releasing lock "refresh_cache-012e6d9c-0f02-4761-9639-9a8e8972ea2b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.082071] env[61629]: DEBUG nova.compute.manager [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 760.082258] env[61629]: DEBUG nova.compute.manager [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 760.082428] env[61629]: DEBUG nova.network.neutron [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 760.105605] env[61629]: DEBUG nova.network.neutron [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 760.244299] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d74048-1df7-44ba-8140-75ef233c6651 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.247222] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Releasing lock "refresh_cache-d013c1e1-952a-4b76-a44d-8499f5159c42" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.247738] env[61629]: DEBUG nova.compute.manager [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 760.248085] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 760.250179] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-189dabc5-270d-494e-ad23-b5054845ee49 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.252867] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-461d13bb-fdc8-4b59-a297-72f2f6702608 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.285737] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30244950-cfa1-41d8-9b7a-9ac3df043a7c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.297739] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebe5f54-c76c-484a-9923-5a8683bc5be0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.305137] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a2129dc-8b0a-4e32-a205-f56cbfa783e2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.316084] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d013c1e1-952a-4b76-a44d-8499f5159c42 could not be found. 
[ 760.316084] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 760.316084] env[61629]: INFO nova.compute.manager [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Took 0.07 seconds to destroy the instance on the hypervisor. [ 760.316301] env[61629]: DEBUG oslo.service.loopingcall [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 760.317027] env[61629]: DEBUG nova.compute.manager [-] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 760.317027] env[61629]: DEBUG nova.network.neutron [-] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 760.326167] env[61629]: DEBUG nova.compute.provider_tree [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.342184] env[61629]: DEBUG nova.network.neutron [-] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 760.465930] env[61629]: DEBUG nova.compute.manager [req-614452db-b5c3-4f53-b550-fced75d9f43c req-14d81e49-6667-41a7-9893-76a43e5843b9 service nova] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Received event network-changed-35513a37-8c02-451a-85d7-dc0055751458 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 760.466149] env[61629]: DEBUG nova.compute.manager [req-614452db-b5c3-4f53-b550-fced75d9f43c req-14d81e49-6667-41a7-9893-76a43e5843b9 service nova] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Refreshing instance network info cache due to event network-changed-35513a37-8c02-451a-85d7-dc0055751458. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 760.466359] env[61629]: DEBUG oslo_concurrency.lockutils [req-614452db-b5c3-4f53-b550-fced75d9f43c req-14d81e49-6667-41a7-9893-76a43e5843b9 service nova] Acquiring lock "refresh_cache-d013c1e1-952a-4b76-a44d-8499f5159c42" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.466496] env[61629]: DEBUG oslo_concurrency.lockutils [req-614452db-b5c3-4f53-b550-fced75d9f43c req-14d81e49-6667-41a7-9893-76a43e5843b9 service nova] Acquired lock "refresh_cache-d013c1e1-952a-4b76-a44d-8499f5159c42" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.466651] env[61629]: DEBUG nova.network.neutron [req-614452db-b5c3-4f53-b550-fced75d9f43c req-14d81e49-6667-41a7-9893-76a43e5843b9 service nova] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Refreshing network info cache for port 35513a37-8c02-451a-85d7-dc0055751458 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 760.609539] env[61629]: DEBUG nova.network.neutron [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.833080] env[61629]: DEBUG nova.scheduler.client.report [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 760.844043] env[61629]: DEBUG nova.network.neutron [-] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.983715] env[61629]: DEBUG nova.network.neutron [req-614452db-b5c3-4f53-b550-fced75d9f43c req-14d81e49-6667-41a7-9893-76a43e5843b9 service nova] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 761.049097] env[61629]: DEBUG nova.network.neutron [req-614452db-b5c3-4f53-b550-fced75d9f43c req-14d81e49-6667-41a7-9893-76a43e5843b9 service nova] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.112377] env[61629]: INFO nova.compute.manager [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] [instance: 012e6d9c-0f02-4761-9639-9a8e8972ea2b] Took 1.03 seconds to deallocate network for instance. 
[ 761.338081] env[61629]: DEBUG oslo_concurrency.lockutils [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.360s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.338614] env[61629]: DEBUG nova.compute.manager [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 761.341439] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.527s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.342806] env[61629]: INFO nova.compute.claims [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 761.347121] env[61629]: INFO nova.compute.manager [-] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Took 1.03 seconds to deallocate network for instance. [ 761.348302] env[61629]: DEBUG nova.compute.claims [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 761.348470] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.551350] env[61629]: DEBUG oslo_concurrency.lockutils [req-614452db-b5c3-4f53-b550-fced75d9f43c req-14d81e49-6667-41a7-9893-76a43e5843b9 service nova] Releasing lock "refresh_cache-d013c1e1-952a-4b76-a44d-8499f5159c42" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.551623] env[61629]: DEBUG nova.compute.manager [req-614452db-b5c3-4f53-b550-fced75d9f43c req-14d81e49-6667-41a7-9893-76a43e5843b9 service nova] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Received event network-vif-deleted-35513a37-8c02-451a-85d7-dc0055751458 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 761.847070] env[61629]: DEBUG nova.compute.utils [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 761.851045] env[61629]: DEBUG nova.compute.manager [None 
req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 761.851045] env[61629]: DEBUG nova.network.neutron [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 761.912223] env[61629]: DEBUG nova.policy [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a50df5fb39b404090e7ff99f8ff1829', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9f7fbb4822c44636b7ad99e213792a5d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 762.147900] env[61629]: INFO nova.scheduler.client.report [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Deleted allocations for instance 012e6d9c-0f02-4761-9639-9a8e8972ea2b [ 762.351348] env[61629]: DEBUG nova.compute.manager [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 762.420877] env[61629]: DEBUG nova.network.neutron [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Successfully created port: 939df38e-b1dd-4f13-afa2-3a0385b51db1 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 762.652581] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1afabb33-9b0e-4a10-ad71-118077373096 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.657521] env[61629]: DEBUG oslo_concurrency.lockutils [None req-be4dc6f4-2b9b-4cb5-a218-078b4bff57f2 tempest-ServerActionsV293TestJSON-888516803 tempest-ServerActionsV293TestJSON-888516803-project-member] Lock "012e6d9c-0f02-4761-9639-9a8e8972ea2b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 164.994s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.665060] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249ecea8-a077-4cc4-88ea-490bce5c47b1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.700499] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7778f971-356e-4426-83db-796bd1b0e639 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.708022] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a972eb-6e6e-40a9-a932-ddde914d0659 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.721431] env[61629]: DEBUG nova.compute.provider_tree [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 763.166246] env[61629]: DEBUG nova.compute.manager [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Starting instance... 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 763.225186] env[61629]: DEBUG nova.scheduler.client.report [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 763.283276] env[61629]: DEBUG nova.compute.manager [req-0d801653-a7de-40d4-8a56-91af05d60756 req-fc055044-41b9-4ac1-ae94-7886423d906f service nova] [instance: f5830e36-257a-418a-add6-01195bb7d103] Received event network-changed-939df38e-b1dd-4f13-afa2-3a0385b51db1 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 763.283566] env[61629]: DEBUG nova.compute.manager [req-0d801653-a7de-40d4-8a56-91af05d60756 req-fc055044-41b9-4ac1-ae94-7886423d906f service nova] [instance: f5830e36-257a-418a-add6-01195bb7d103] Refreshing instance network info cache due to event network-changed-939df38e-b1dd-4f13-afa2-3a0385b51db1. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 763.283824] env[61629]: DEBUG oslo_concurrency.lockutils [req-0d801653-a7de-40d4-8a56-91af05d60756 req-fc055044-41b9-4ac1-ae94-7886423d906f service nova] Acquiring lock "refresh_cache-f5830e36-257a-418a-add6-01195bb7d103" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.283992] env[61629]: DEBUG oslo_concurrency.lockutils [req-0d801653-a7de-40d4-8a56-91af05d60756 req-fc055044-41b9-4ac1-ae94-7886423d906f service nova] Acquired lock "refresh_cache-f5830e36-257a-418a-add6-01195bb7d103" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.285270] env[61629]: DEBUG nova.network.neutron [req-0d801653-a7de-40d4-8a56-91af05d60756 req-fc055044-41b9-4ac1-ae94-7886423d906f service nova] [instance: f5830e36-257a-418a-add6-01195bb7d103] Refreshing network info cache for port 939df38e-b1dd-4f13-afa2-3a0385b51db1 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 763.369425] env[61629]: DEBUG nova.compute.manager [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 763.396253] env[61629]: DEBUG nova.virt.hardware [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 763.396253] env[61629]: DEBUG nova.virt.hardware [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 763.396253] env[61629]: DEBUG nova.virt.hardware [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 763.396428] env[61629]: DEBUG nova.virt.hardware [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 763.396428] env[61629]: DEBUG nova.virt.hardware [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 763.396428] env[61629]: DEBUG nova.virt.hardware [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 763.396428] env[61629]: DEBUG nova.virt.hardware [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 763.396428] env[61629]: DEBUG nova.virt.hardware [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 763.396618] env[61629]: DEBUG 
nova.virt.hardware [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 763.396931] env[61629]: DEBUG nova.virt.hardware [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 763.397364] env[61629]: DEBUG nova.virt.hardware [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 763.399573] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf92d99-8b85-48fa-9acf-b42022300c99 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.408027] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a95bb6c-0943-4441-a9c4-d5c78c43ce09 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.477616] env[61629]: ERROR nova.compute.manager [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 939df38e-b1dd-4f13-afa2-3a0385b51db1, please check neutron logs for more information. 
[ 763.477616] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 763.477616] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 763.477616] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 763.477616] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 763.477616] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 763.477616] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 763.477616] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 763.477616] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.477616] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 763.477616] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 763.477616] env[61629]: ERROR nova.compute.manager raise self.value [ 763.477616] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 763.477616] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 763.477616] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.477616] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 763.478123] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 763.478123] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 763.478123] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 939df38e-b1dd-4f13-afa2-3a0385b51db1, please check neutron logs for more information. 
[ 763.478123] env[61629]: ERROR nova.compute.manager [ 763.478123] env[61629]: Traceback (most recent call last): [ 763.478123] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 763.478123] env[61629]: listener.cb(fileno) [ 763.478123] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 763.478123] env[61629]: result = function(*args, **kwargs) [ 763.478123] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 763.478123] env[61629]: return func(*args, **kwargs) [ 763.478123] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 763.478123] env[61629]: raise e [ 763.478123] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 763.478123] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 763.478123] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 763.478123] env[61629]: created_port_ids = self._update_ports_for_instance( [ 763.478123] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 763.478123] env[61629]: with excutils.save_and_reraise_exception(): [ 763.478123] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.478123] env[61629]: self.force_reraise() [ 763.478123] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 763.478123] env[61629]: raise self.value [ 763.478123] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 763.478123] env[61629]: updated_port = self._update_port( [ 763.478123] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.478123] env[61629]: _ensure_no_port_binding_failure(port) [ 763.478123] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 763.478123] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 763.479384] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 939df38e-b1dd-4f13-afa2-3a0385b51db1, please check neutron logs for more information. [ 763.479384] env[61629]: Removing descriptor: 21 [ 763.479384] env[61629]: ERROR nova.compute.manager [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 939df38e-b1dd-4f13-afa2-3a0385b51db1, please check neutron logs for more information. 
[ 763.479384] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] Traceback (most recent call last): [ 763.479384] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 763.479384] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] yield resources [ 763.479384] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 763.479384] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] self.driver.spawn(context, instance, image_meta, [ 763.479384] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 763.479384] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] self._vmops.spawn(context, instance, image_meta, injected_files, [ 763.479384] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 763.479384] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] vm_ref = self.build_virtual_machine(instance, [ 763.479981] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 763.479981] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] vif_infos = vmwarevif.get_vif_info(self._session, [ 763.479981] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 763.479981] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] for vif in network_info: [ 763.479981] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 763.479981] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] return self._sync_wrapper(fn, *args, **kwargs) [ 763.479981] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 763.479981] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] self.wait() [ 763.479981] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 763.479981] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] self[:] = self._gt.wait() [ 763.479981] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 763.479981] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] return self._exit_event.wait() [ 763.479981] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 763.480486] env[61629]: ERROR 
nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] result = hub.switch() [ 763.480486] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 763.480486] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] return self.greenlet.switch() [ 763.480486] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 763.480486] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] result = function(*args, **kwargs) [ 763.480486] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 763.480486] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] return func(*args, **kwargs) [ 763.480486] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 763.480486] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] raise e [ 763.480486] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 763.480486] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] nwinfo = self.network_api.allocate_for_instance( [ 763.480486] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 763.480486] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] created_port_ids = self._update_ports_for_instance( [ 763.481153] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 763.481153] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] with excutils.save_and_reraise_exception(): [ 763.481153] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.481153] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] self.force_reraise() [ 763.481153] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 763.481153] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] raise self.value [ 763.481153] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 763.481153] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] updated_port = self._update_port( [ 763.481153] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.481153] 
env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] _ensure_no_port_binding_failure(port) [ 763.481153] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 763.481153] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] raise exception.PortBindingFailed(port_id=port['id']) [ 763.481727] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] nova.exception.PortBindingFailed: Binding failed for port 939df38e-b1dd-4f13-afa2-3a0385b51db1, please check neutron logs for more information. [ 763.481727] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] [ 763.481727] env[61629]: INFO nova.compute.manager [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Terminating instance [ 763.482143] env[61629]: DEBUG oslo_concurrency.lockutils [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Acquiring lock "refresh_cache-f5830e36-257a-418a-add6-01195bb7d103" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.691974] env[61629]: DEBUG oslo_concurrency.lockutils [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.732133] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.391s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.732702] env[61629]: DEBUG nova.compute.manager [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 763.735733] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.376s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.736722] env[61629]: INFO nova.compute.claims [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 763.805019] env[61629]: DEBUG nova.network.neutron [req-0d801653-a7de-40d4-8a56-91af05d60756 req-fc055044-41b9-4ac1-ae94-7886423d906f service nova] [instance: f5830e36-257a-418a-add6-01195bb7d103] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 763.874194] env[61629]: DEBUG nova.network.neutron [req-0d801653-a7de-40d4-8a56-91af05d60756 req-fc055044-41b9-4ac1-ae94-7886423d906f service nova] [instance: f5830e36-257a-418a-add6-01195bb7d103] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.240589] env[61629]: DEBUG nova.compute.utils [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 764.243980] env[61629]: DEBUG nova.compute.manager [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 764.244158] env[61629]: DEBUG nova.network.neutron [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 764.299405] env[61629]: DEBUG nova.policy [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a50df5fb39b404090e7ff99f8ff1829', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9f7fbb4822c44636b7ad99e213792a5d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 764.377433] env[61629]: DEBUG oslo_concurrency.lockutils [req-0d801653-a7de-40d4-8a56-91af05d60756 req-fc055044-41b9-4ac1-ae94-7886423d906f service nova] Releasing lock "refresh_cache-f5830e36-257a-418a-add6-01195bb7d103" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.379299] env[61629]: DEBUG oslo_concurrency.lockutils [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Acquired lock "refresh_cache-f5830e36-257a-418a-add6-01195bb7d103" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.379713] env[61629]: DEBUG nova.network.neutron [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 764.566999] env[61629]: DEBUG nova.network.neutron [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Successfully created port: 5f5e81fd-f5d2-4652-a23f-3e4a7ca1d8aa {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 764.745377] env[61629]: DEBUG nova.compute.manager [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 764.908087] env[61629]: DEBUG nova.network.neutron [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 764.997139] env[61629]: DEBUG nova.network.neutron [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.119968] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ae6a8f9-4a0b-4908-990b-6c402baff3d1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.130157] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb31d64c-732a-4286-9076-80decb6c2fb1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.167306] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feee84eb-6fe9-413e-a1d0-949f5c103a0d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.175282] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fdd8802-b5f7-49d5-a484-6c4ddb328d9d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.188614] env[61629]: DEBUG nova.compute.provider_tree [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 765.363049] env[61629]: DEBUG nova.compute.manager [req-99038d55-72e8-4a7c-81de-6eb21f44f9ee req-e0058ff4-eb20-4c21-b77c-6eed6658c2cb service nova] [instance: f5830e36-257a-418a-add6-01195bb7d103] Received event network-vif-deleted-939df38e-b1dd-4f13-afa2-3a0385b51db1 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 765.500747] env[61629]: DEBUG oslo_concurrency.lockutils [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Releasing lock "refresh_cache-f5830e36-257a-418a-add6-01195bb7d103" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.501187] env[61629]: DEBUG nova.compute.manager [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 765.501383] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 765.501678] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d81a9a8b-7957-4ac7-ab4f-ff3becb8e1dc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.511460] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8442c51d-e848-4b52-a1a7-bc3b19c85d6d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.532696] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f5830e36-257a-418a-add6-01195bb7d103 could not be found. [ 765.532939] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 765.533137] env[61629]: INFO nova.compute.manager [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Took 0.03 seconds to destroy the instance on the hypervisor. [ 765.533382] env[61629]: DEBUG oslo.service.loopingcall [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 765.533601] env[61629]: DEBUG nova.compute.manager [-] [instance: f5830e36-257a-418a-add6-01195bb7d103] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 765.533700] env[61629]: DEBUG nova.network.neutron [-] [instance: f5830e36-257a-418a-add6-01195bb7d103] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 765.549781] env[61629]: DEBUG nova.network.neutron [-] [instance: f5830e36-257a-418a-add6-01195bb7d103] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 765.691581] env[61629]: DEBUG nova.scheduler.client.report [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 765.722227] env[61629]: ERROR nova.compute.manager [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5f5e81fd-f5d2-4652-a23f-3e4a7ca1d8aa, please check neutron logs for more information. [ 765.722227] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 765.722227] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 765.722227] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 765.722227] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 765.722227] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 765.722227] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 765.722227] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 765.722227] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.722227] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 765.722227] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.722227] env[61629]: ERROR nova.compute.manager raise self.value [ 765.722227] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 765.722227] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 765.722227] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.722227] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 765.722868] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 765.722868] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 765.722868] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5f5e81fd-f5d2-4652-a23f-3e4a7ca1d8aa, please check neutron logs for more information. 
[ 765.722868] env[61629]: ERROR nova.compute.manager [ 765.722868] env[61629]: Traceback (most recent call last): [ 765.722868] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 765.722868] env[61629]: listener.cb(fileno) [ 765.722868] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 765.722868] env[61629]: result = function(*args, **kwargs) [ 765.722868] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 765.722868] env[61629]: return func(*args, **kwargs) [ 765.722868] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 765.722868] env[61629]: raise e [ 765.722868] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 765.722868] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 765.722868] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 765.722868] env[61629]: created_port_ids = self._update_ports_for_instance( [ 765.722868] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 765.722868] env[61629]: with excutils.save_and_reraise_exception(): [ 765.722868] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.722868] env[61629]: self.force_reraise() [ 765.722868] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.722868] env[61629]: raise self.value [ 765.722868] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 765.722868] env[61629]: updated_port = self._update_port( [ 765.722868] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.722868] env[61629]: _ensure_no_port_binding_failure(port) [ 765.722868] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 765.722868] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 765.723840] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 5f5e81fd-f5d2-4652-a23f-3e4a7ca1d8aa, please check neutron logs for more information. [ 765.723840] env[61629]: Removing descriptor: 21 [ 765.757867] env[61629]: DEBUG nova.compute.manager [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 765.784829] env[61629]: DEBUG nova.virt.hardware [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 765.785118] env[61629]: DEBUG nova.virt.hardware [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 765.785284] env[61629]: DEBUG nova.virt.hardware [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 765.785543] env[61629]: DEBUG nova.virt.hardware [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 765.785615] env[61629]: DEBUG nova.virt.hardware [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 765.785742] env[61629]: DEBUG nova.virt.hardware [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 765.785944] env[61629]: DEBUG nova.virt.hardware [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 765.786131] env[61629]: DEBUG nova.virt.hardware [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 765.786377] env[61629]: DEBUG 
nova.virt.hardware [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 765.786578] env[61629]: DEBUG nova.virt.hardware [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 765.786759] env[61629]: DEBUG nova.virt.hardware [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 765.787605] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf5e7ed-a104-425c-b629-884373760c39 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.795524] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df61d17b-81d1-4bab-ae10-a618f0eee564 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.810809] env[61629]: ERROR nova.compute.manager [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5f5e81fd-f5d2-4652-a23f-3e4a7ca1d8aa, please check neutron logs for more information. 
[ 765.810809] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Traceback (most recent call last): [ 765.810809] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 765.810809] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] yield resources [ 765.810809] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 765.810809] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] self.driver.spawn(context, instance, image_meta, [ 765.810809] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 765.810809] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 765.810809] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 765.810809] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] vm_ref = self.build_virtual_machine(instance, [ 765.810809] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 765.811280] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] vif_infos = vmwarevif.get_vif_info(self._session, [ 765.811280] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 765.811280] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] for vif in network_info: [ 765.811280] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 765.811280] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] return self._sync_wrapper(fn, *args, **kwargs) [ 765.811280] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 765.811280] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] self.wait() [ 765.811280] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 765.811280] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] self[:] = self._gt.wait() [ 765.811280] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 765.811280] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] return self._exit_event.wait() [ 765.811280] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 765.811280] env[61629]: ERROR 
nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] current.throw(*self._exc) [ 765.811725] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 765.811725] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] result = function(*args, **kwargs) [ 765.811725] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 765.811725] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] return func(*args, **kwargs) [ 765.811725] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 765.811725] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] raise e [ 765.811725] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 765.811725] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] nwinfo = self.network_api.allocate_for_instance( [ 765.811725] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 765.811725] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] created_port_ids = self._update_ports_for_instance( [ 765.811725] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 765.811725] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] with excutils.save_and_reraise_exception(): [ 765.811725] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.812336] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] self.force_reraise() [ 765.812336] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.812336] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] raise self.value [ 765.812336] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 765.812336] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] updated_port = self._update_port( [ 765.812336] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.812336] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] _ensure_no_port_binding_failure(port) [ 765.812336] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
765.812336] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] raise exception.PortBindingFailed(port_id=port['id']) [ 765.812336] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] nova.exception.PortBindingFailed: Binding failed for port 5f5e81fd-f5d2-4652-a23f-3e4a7ca1d8aa, please check neutron logs for more information. [ 765.812336] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] [ 765.812336] env[61629]: INFO nova.compute.manager [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Terminating instance [ 765.813965] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Acquiring lock "refresh_cache-52816a66-442f-4869-aee3-0cebd6f5e9bf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.814142] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Acquired lock "refresh_cache-52816a66-442f-4869-aee3-0cebd6f5e9bf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.814350] env[61629]: DEBUG nova.network.neutron [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 766.052728] env[61629]: DEBUG nova.network.neutron [-] [instance: f5830e36-257a-418a-add6-01195bb7d103] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.197083] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.461s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.197481] env[61629]: DEBUG nova.compute.manager [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 766.200090] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.858s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.202214] env[61629]: INFO nova.compute.claims [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 766.332322] env[61629]: DEBUG nova.network.neutron [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 766.393369] env[61629]: DEBUG nova.network.neutron [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.557892] env[61629]: INFO nova.compute.manager [-] [instance: f5830e36-257a-418a-add6-01195bb7d103] Took 1.02 seconds to deallocate network for instance. [ 766.560263] env[61629]: DEBUG nova.compute.claims [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 766.560436] env[61629]: DEBUG oslo_concurrency.lockutils [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.707620] env[61629]: DEBUG nova.compute.utils [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 766.712087] env[61629]: DEBUG nova.compute.manager [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 766.712378] env[61629]: DEBUG nova.network.neutron [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 766.749228] env[61629]: DEBUG nova.policy [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '15c08213249d436d8cc9b3e6dea9aad3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '690494f653274225a0274d08c2d7c62f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 766.898130] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Releasing lock "refresh_cache-52816a66-442f-4869-aee3-0cebd6f5e9bf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.898582] env[61629]: DEBUG nova.compute.manager [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 766.898973] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 766.899096] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a5a0565f-3510-44d7-9c34-c80deaf56e12 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.908808] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-428fdfa3-b563-40d2-9e2c-e589d6831bf1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.932116] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 52816a66-442f-4869-aee3-0cebd6f5e9bf could not be found. 
[ 766.932116] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 766.932116] env[61629]: INFO nova.compute.manager [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Took 0.03 seconds to destroy the instance on the hypervisor. [ 766.932116] env[61629]: DEBUG oslo.service.loopingcall [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 766.932116] env[61629]: DEBUG nova.compute.manager [-] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 766.932116] env[61629]: DEBUG nova.network.neutron [-] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 766.951630] env[61629]: DEBUG nova.network.neutron [-] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 767.068565] env[61629]: DEBUG nova.network.neutron [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Successfully created port: 50771be9-1b74-455d-823d-98060158af48 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 767.216517] env[61629]: DEBUG nova.compute.manager [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 767.389445] env[61629]: DEBUG nova.compute.manager [req-73b580c1-4121-4f86-85cb-8f1b889b450a req-9fb3e536-8f61-46f6-a34c-70996de2f634 service nova] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Received event network-changed-5f5e81fd-f5d2-4652-a23f-3e4a7ca1d8aa {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 767.389445] env[61629]: DEBUG nova.compute.manager [req-73b580c1-4121-4f86-85cb-8f1b889b450a req-9fb3e536-8f61-46f6-a34c-70996de2f634 service nova] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Refreshing instance network info cache due to event network-changed-5f5e81fd-f5d2-4652-a23f-3e4a7ca1d8aa. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 767.389445] env[61629]: DEBUG oslo_concurrency.lockutils [req-73b580c1-4121-4f86-85cb-8f1b889b450a req-9fb3e536-8f61-46f6-a34c-70996de2f634 service nova] Acquiring lock "refresh_cache-52816a66-442f-4869-aee3-0cebd6f5e9bf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 767.389445] env[61629]: DEBUG oslo_concurrency.lockutils [req-73b580c1-4121-4f86-85cb-8f1b889b450a req-9fb3e536-8f61-46f6-a34c-70996de2f634 service nova] Acquired lock "refresh_cache-52816a66-442f-4869-aee3-0cebd6f5e9bf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.389445] env[61629]: DEBUG nova.network.neutron [req-73b580c1-4121-4f86-85cb-8f1b889b450a req-9fb3e536-8f61-46f6-a34c-70996de2f634 service nova] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Refreshing network info cache for port 5f5e81fd-f5d2-4652-a23f-3e4a7ca1d8aa {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 767.454861] env[61629]: DEBUG nova.network.neutron [-] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.538438] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006c7790-fed3-4664-9103-39d333ca9995 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.550382] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a179afaa-16db-4f1b-960c-ac8d425ad512 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.581661] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da96576f-e6b1-4d0a-acb6-0be833ec2771 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.588779] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742092e7-a9e1-4349-9251-658ff540f9ac {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.601718] env[61629]: DEBUG nova.compute.provider_tree [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 767.908550] env[61629]: DEBUG nova.network.neutron [req-73b580c1-4121-4f86-85cb-8f1b889b450a req-9fb3e536-8f61-46f6-a34c-70996de2f634 service nova] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 767.961210] env[61629]: DEBUG nova.network.neutron [req-73b580c1-4121-4f86-85cb-8f1b889b450a req-9fb3e536-8f61-46f6-a34c-70996de2f634 service nova] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.962517] env[61629]: INFO nova.compute.manager [-] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Took 1.03 seconds to deallocate network for instance. [ 767.966857] env[61629]: DEBUG nova.compute.claims [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 767.967054] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.984867] env[61629]: ERROR nova.compute.manager [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 50771be9-1b74-455d-823d-98060158af48, please check neutron logs for more information. [ 767.984867] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 767.984867] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 767.984867] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 767.984867] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 767.984867] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 767.984867] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 767.984867] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 767.984867] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 767.984867] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 767.984867] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 767.984867] env[61629]: ERROR nova.compute.manager raise self.value [ 767.984867] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 767.984867] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 767.984867] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 767.984867] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 767.985414] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", 
line 294, in _ensure_no_port_binding_failure [ 767.985414] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 767.985414] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 50771be9-1b74-455d-823d-98060158af48, please check neutron logs for more information. [ 767.985414] env[61629]: ERROR nova.compute.manager [ 767.985414] env[61629]: Traceback (most recent call last): [ 767.985414] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 767.985414] env[61629]: listener.cb(fileno) [ 767.985414] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 767.985414] env[61629]: result = function(*args, **kwargs) [ 767.985414] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 767.985414] env[61629]: return func(*args, **kwargs) [ 767.985414] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 767.985414] env[61629]: raise e [ 767.985414] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 767.985414] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 767.985414] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 767.985414] env[61629]: created_port_ids = self._update_ports_for_instance( [ 767.985414] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 767.985414] env[61629]: with excutils.save_and_reraise_exception(): [ 767.985414] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 767.985414] env[61629]: self.force_reraise() [ 767.985414] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 767.985414] env[61629]: raise self.value [ 767.985414] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 767.985414] env[61629]: updated_port = self._update_port( [ 767.985414] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 767.985414] env[61629]: _ensure_no_port_binding_failure(port) [ 767.985414] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 767.985414] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 767.986413] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 50771be9-1b74-455d-823d-98060158af48, please check neutron logs for more information. 
[ 767.986413] env[61629]: Removing descriptor: 21 [ 768.104756] env[61629]: DEBUG nova.scheduler.client.report [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 768.227027] env[61629]: DEBUG nova.compute.manager [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 768.250324] env[61629]: DEBUG nova.virt.hardware [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 768.250589] env[61629]: DEBUG nova.virt.hardware [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 768.250745] env[61629]: DEBUG nova.virt.hardware [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 768.251705] env[61629]: DEBUG nova.virt.hardware [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 768.251705] env[61629]: DEBUG nova.virt.hardware [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 768.251705] env[61629]: DEBUG nova.virt.hardware [None 
req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 768.251705] env[61629]: DEBUG nova.virt.hardware [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 768.251705] env[61629]: DEBUG nova.virt.hardware [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 768.251943] env[61629]: DEBUG nova.virt.hardware [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 768.252497] env[61629]: DEBUG nova.virt.hardware [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 768.252718] env[61629]: DEBUG nova.virt.hardware [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 768.253973] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821ae927-5886-455c-8d7a-30cb1d1f3d31 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.262112] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67e06b4d-8d66-4c41-9f78-07b2c7ba529e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.275953] env[61629]: ERROR nova.compute.manager [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 50771be9-1b74-455d-823d-98060158af48, please check neutron logs for more information. 
[ 768.275953] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Traceback (most recent call last): [ 768.275953] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 768.275953] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] yield resources [ 768.275953] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 768.275953] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] self.driver.spawn(context, instance, image_meta, [ 768.275953] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 768.275953] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] self._vmops.spawn(context, instance, image_meta, injected_files, [ 768.275953] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 768.275953] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] vm_ref = self.build_virtual_machine(instance, [ 768.275953] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 768.276403] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] vif_infos = vmwarevif.get_vif_info(self._session, [ 768.276403] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 768.276403] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] for vif in network_info: [ 768.276403] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 768.276403] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] return self._sync_wrapper(fn, *args, **kwargs) [ 768.276403] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 768.276403] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] self.wait() [ 768.276403] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 768.276403] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] self[:] = self._gt.wait() [ 768.276403] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 768.276403] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] return self._exit_event.wait() [ 768.276403] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 768.276403] env[61629]: ERROR 
nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] current.throw(*self._exc) [ 768.276872] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 768.276872] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] result = function(*args, **kwargs) [ 768.276872] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 768.276872] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] return func(*args, **kwargs) [ 768.276872] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 768.276872] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] raise e [ 768.276872] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 768.276872] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] nwinfo = self.network_api.allocate_for_instance( [ 768.276872] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 768.276872] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] created_port_ids = self._update_ports_for_instance( [ 768.276872] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 768.276872] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] with excutils.save_and_reraise_exception(): [ 768.276872] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 768.277327] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] self.force_reraise() [ 768.277327] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 768.277327] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] raise self.value [ 768.277327] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 768.277327] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] updated_port = self._update_port( [ 768.277327] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 768.277327] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] _ensure_no_port_binding_failure(port) [ 768.277327] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
768.277327] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] raise exception.PortBindingFailed(port_id=port['id']) [ 768.277327] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] nova.exception.PortBindingFailed: Binding failed for port 50771be9-1b74-455d-823d-98060158af48, please check neutron logs for more information. [ 768.277327] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] [ 768.277327] env[61629]: INFO nova.compute.manager [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Terminating instance [ 768.278360] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Acquiring lock "refresh_cache-fa8a181b-2170-4c38-98d6-adc4e5a80f94" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 768.278515] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Acquired lock "refresh_cache-fa8a181b-2170-4c38-98d6-adc4e5a80f94" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.278745] env[61629]: DEBUG nova.network.neutron [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 768.464877] env[61629]: DEBUG oslo_concurrency.lockutils [req-73b580c1-4121-4f86-85cb-8f1b889b450a req-9fb3e536-8f61-46f6-a34c-70996de2f634 service nova] Releasing lock "refresh_cache-52816a66-442f-4869-aee3-0cebd6f5e9bf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 768.465184] env[61629]: DEBUG nova.compute.manager [req-73b580c1-4121-4f86-85cb-8f1b889b450a req-9fb3e536-8f61-46f6-a34c-70996de2f634 service nova] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Received event network-vif-deleted-5f5e81fd-f5d2-4652-a23f-3e4a7ca1d8aa {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 768.610166] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.410s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.610707] env[61629]: DEBUG nova.compute.manager [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 768.613196] env[61629]: DEBUG oslo_concurrency.lockutils [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.605s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.799262] env[61629]: DEBUG nova.network.neutron [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 768.877985] env[61629]: DEBUG nova.network.neutron [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.118727] env[61629]: DEBUG nova.compute.utils [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 769.123464] env[61629]: DEBUG nova.compute.manager [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 769.123660] env[61629]: DEBUG nova.network.neutron [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 769.174103] env[61629]: DEBUG nova.policy [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '37eecff28da94476a41ca93b5ed20ced', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '31bd580690d14cf69018357ebe226324', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 769.381217] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Releasing lock "refresh_cache-fa8a181b-2170-4c38-98d6-adc4e5a80f94" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 769.381684] env[61629]: DEBUG nova.compute.manager [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 769.381895] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 769.382774] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e860e036-a6b2-40e9-9d04-db8a46b95b5d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.385557] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8bf62450-68c6-47ed-86d1-66cdbc2a6ce5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.393513] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0a9d62-a38d-4715-a738-5a7ebcdd9a9a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.398992] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded351be-570e-497d-b639-62d67dd1c3d7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.420349] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fa8a181b-2170-4c38-98d6-adc4e5a80f94 could not be found. [ 769.420759] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 769.421036] env[61629]: INFO nova.compute.manager [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Took 0.04 seconds to destroy the instance on the hypervisor. [ 769.421846] env[61629]: DEBUG oslo.service.loopingcall [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 769.452010] env[61629]: DEBUG nova.compute.manager [-] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 769.452200] env[61629]: DEBUG nova.network.neutron [-] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 769.454854] env[61629]: DEBUG nova.compute.manager [req-9e3d66b0-d9bb-4f86-8c51-e21a4e226399 req-5c5851be-a00d-4ef4-b530-3005063e27ad service nova] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Received event network-changed-50771be9-1b74-455d-823d-98060158af48 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 769.455090] env[61629]: DEBUG nova.compute.manager [req-9e3d66b0-d9bb-4f86-8c51-e21a4e226399 req-5c5851be-a00d-4ef4-b530-3005063e27ad service nova] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Refreshing instance network info cache due to event network-changed-50771be9-1b74-455d-823d-98060158af48. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 769.455411] env[61629]: DEBUG oslo_concurrency.lockutils [req-9e3d66b0-d9bb-4f86-8c51-e21a4e226399 req-5c5851be-a00d-4ef4-b530-3005063e27ad service nova] Acquiring lock "refresh_cache-fa8a181b-2170-4c38-98d6-adc4e5a80f94" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.455731] env[61629]: DEBUG oslo_concurrency.lockutils [req-9e3d66b0-d9bb-4f86-8c51-e21a4e226399 req-5c5851be-a00d-4ef4-b530-3005063e27ad service nova] Acquired lock "refresh_cache-fa8a181b-2170-4c38-98d6-adc4e5a80f94" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.455820] env[61629]: DEBUG nova.network.neutron [req-9e3d66b0-d9bb-4f86-8c51-e21a4e226399 req-5c5851be-a00d-4ef4-b530-3005063e27ad service nova] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Refreshing network info cache for port 50771be9-1b74-455d-823d-98060158af48 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 769.458156] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c80afa4-ab40-4481-bc21-3f79fbd6afc1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.466413] env[61629]: DEBUG nova.network.neutron [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Successfully created port: 8e84f788-48f9-4e4e-9537-d543277d505e {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 769.469533] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b716c44-7047-4312-a560-af1cbc89ec22 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.475274] env[61629]: DEBUG nova.network.neutron [-] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 769.486422] env[61629]: DEBUG nova.compute.provider_tree [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 769.487696] env[61629]: DEBUG nova.network.neutron [-] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.624597] env[61629]: DEBUG nova.compute.manager [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 769.977502] env[61629]: DEBUG nova.network.neutron [req-9e3d66b0-d9bb-4f86-8c51-e21a4e226399 req-5c5851be-a00d-4ef4-b530-3005063e27ad service nova] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 769.990193] env[61629]: DEBUG nova.scheduler.client.report [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 769.995148] env[61629]: INFO nova.compute.manager [-] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Took 0.54 seconds to deallocate network for instance. 
[ 769.996147] env[61629]: DEBUG nova.compute.claims [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 769.996325] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.114265] env[61629]: DEBUG nova.network.neutron [req-9e3d66b0-d9bb-4f86-8c51-e21a4e226399 req-5c5851be-a00d-4ef4-b530-3005063e27ad service nova] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.366780] env[61629]: ERROR nova.compute.manager [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8e84f788-48f9-4e4e-9537-d543277d505e, please check neutron logs for more information. [ 770.366780] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 770.366780] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 770.366780] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 770.366780] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 770.366780] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 770.366780] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 770.366780] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 770.366780] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 770.366780] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 770.366780] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 770.366780] env[61629]: ERROR nova.compute.manager raise self.value [ 770.366780] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 770.366780] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 770.366780] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 770.366780] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 770.367315] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 770.367315] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 770.367315] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for 
port 8e84f788-48f9-4e4e-9537-d543277d505e, please check neutron logs for more information. [ 770.367315] env[61629]: ERROR nova.compute.manager [ 770.367315] env[61629]: Traceback (most recent call last): [ 770.367315] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 770.367315] env[61629]: listener.cb(fileno) [ 770.367315] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 770.367315] env[61629]: result = function(*args, **kwargs) [ 770.367315] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 770.367315] env[61629]: return func(*args, **kwargs) [ 770.367315] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 770.367315] env[61629]: raise e [ 770.367315] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 770.367315] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 770.367315] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 770.367315] env[61629]: created_port_ids = self._update_ports_for_instance( [ 770.367315] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 770.367315] env[61629]: with excutils.save_and_reraise_exception(): [ 770.367315] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 770.367315] env[61629]: self.force_reraise() [ 770.367315] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 770.367315] env[61629]: raise self.value [ 770.367315] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 770.367315] env[61629]: updated_port = self._update_port( [ 770.367315] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 770.367315] env[61629]: _ensure_no_port_binding_failure(port) [ 770.367315] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 770.367315] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 770.368277] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 8e84f788-48f9-4e4e-9537-d543277d505e, please check neutron logs for more information. [ 770.368277] env[61629]: Removing descriptor: 21 [ 770.497133] env[61629]: DEBUG oslo_concurrency.lockutils [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.884s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 770.497791] env[61629]: ERROR nova.compute.manager [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 86fd11c4-67bd-4e1f-b34b-1b2c9c5a4bac, please check neutron logs for more information. 
[ 770.497791] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] Traceback (most recent call last): [ 770.497791] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 770.497791] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] self.driver.spawn(context, instance, image_meta, [ 770.497791] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 770.497791] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 770.497791] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 770.497791] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] vm_ref = self.build_virtual_machine(instance, [ 770.497791] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 770.497791] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] vif_infos = vmwarevif.get_vif_info(self._session, [ 770.497791] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 770.498335] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] for vif in network_info: [ 770.498335] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 770.498335] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] return self._sync_wrapper(fn, *args, **kwargs) [ 770.498335] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 770.498335] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] self.wait() [ 770.498335] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 770.498335] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] self[:] = self._gt.wait() [ 770.498335] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 770.498335] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] return self._exit_event.wait() [ 770.498335] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 770.498335] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] result = hub.switch() [ 770.498335] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
770.498335] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] return self.greenlet.switch() [ 770.498929] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 770.498929] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] result = function(*args, **kwargs) [ 770.498929] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 770.498929] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] return func(*args, **kwargs) [ 770.498929] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 770.498929] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] raise e [ 770.498929] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 770.498929] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] nwinfo = self.network_api.allocate_for_instance( [ 770.498929] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 770.498929] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] created_port_ids = self._update_ports_for_instance( [ 770.498929] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 770.498929] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] with excutils.save_and_reraise_exception(): [ 770.498929] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 770.499527] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] self.force_reraise() [ 770.499527] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 770.499527] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] raise self.value [ 770.499527] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 770.499527] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] updated_port = self._update_port( [ 770.499527] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 770.499527] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] _ensure_no_port_binding_failure(port) [ 770.499527] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 770.499527] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] raise exception.PortBindingFailed(port_id=port['id']) [ 770.499527] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] nova.exception.PortBindingFailed: Binding failed for port 86fd11c4-67bd-4e1f-b34b-1b2c9c5a4bac, please check neutron logs for more information. [ 770.499527] env[61629]: ERROR nova.compute.manager [instance: ad374170-21a1-4036-9804-b82493701abf] [ 770.499939] env[61629]: DEBUG nova.compute.utils [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Binding failed for port 86fd11c4-67bd-4e1f-b34b-1b2c9c5a4bac, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 770.499939] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.492s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.502498] env[61629]: DEBUG nova.compute.manager [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Build of instance ad374170-21a1-4036-9804-b82493701abf was re-scheduled: Binding failed for port 86fd11c4-67bd-4e1f-b34b-1b2c9c5a4bac, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 770.502920] env[61629]: DEBUG nova.compute.manager [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 770.503159] env[61629]: DEBUG oslo_concurrency.lockutils [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquiring lock "refresh_cache-ad374170-21a1-4036-9804-b82493701abf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.503305] env[61629]: DEBUG oslo_concurrency.lockutils [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquired lock "refresh_cache-ad374170-21a1-4036-9804-b82493701abf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.503460] env[61629]: DEBUG nova.network.neutron [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 770.618477] env[61629]: DEBUG oslo_concurrency.lockutils [req-9e3d66b0-d9bb-4f86-8c51-e21a4e226399 req-5c5851be-a00d-4ef4-b530-3005063e27ad service nova] Releasing lock "refresh_cache-fa8a181b-2170-4c38-98d6-adc4e5a80f94" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.618764] env[61629]: DEBUG nova.compute.manager [req-9e3d66b0-d9bb-4f86-8c51-e21a4e226399 req-5c5851be-a00d-4ef4-b530-3005063e27ad service nova] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Received event network-vif-deleted-50771be9-1b74-455d-823d-98060158af48 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 770.634876] env[61629]: DEBUG nova.compute.manager [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 770.659466] env[61629]: DEBUG nova.virt.hardware [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 770.659708] env[61629]: DEBUG nova.virt.hardware [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 770.659863] env[61629]: DEBUG nova.virt.hardware [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 770.660055] env[61629]: DEBUG nova.virt.hardware [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 770.660201] env[61629]: DEBUG nova.virt.hardware [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 770.660346] env[61629]: DEBUG nova.virt.hardware [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 770.660551] env[61629]: DEBUG nova.virt.hardware [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 770.660706] env[61629]: DEBUG nova.virt.hardware [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 770.660871] env[61629]: DEBUG nova.virt.hardware [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f 
tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 770.661045] env[61629]: DEBUG nova.virt.hardware [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 770.661222] env[61629]: DEBUG nova.virt.hardware [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 770.662102] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f16079d-e9b0-472f-bf7d-21a882f97bb7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.670220] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce54a26d-53e0-4df4-8bd8-485ade8226e4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.683837] env[61629]: ERROR nova.compute.manager [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8e84f788-48f9-4e4e-9537-d543277d505e, please check neutron logs for more information. 
[ 770.683837] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Traceback (most recent call last): [ 770.683837] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 770.683837] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] yield resources [ 770.683837] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 770.683837] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] self.driver.spawn(context, instance, image_meta, [ 770.683837] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 770.683837] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 770.683837] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 770.683837] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] vm_ref = self.build_virtual_machine(instance, [ 770.683837] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 770.684315] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] vif_infos = vmwarevif.get_vif_info(self._session, [ 770.684315] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 770.684315] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] for vif in network_info: [ 770.684315] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 770.684315] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] return self._sync_wrapper(fn, *args, **kwargs) [ 770.684315] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 770.684315] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] self.wait() [ 770.684315] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 770.684315] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] self[:] = self._gt.wait() [ 770.684315] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 770.684315] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] return self._exit_event.wait() [ 770.684315] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 770.684315] env[61629]: ERROR 
nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] current.throw(*self._exc) [ 770.684787] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 770.684787] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] result = function(*args, **kwargs) [ 770.684787] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 770.684787] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] return func(*args, **kwargs) [ 770.684787] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 770.684787] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] raise e [ 770.684787] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 770.684787] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] nwinfo = self.network_api.allocate_for_instance( [ 770.684787] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 770.684787] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] created_port_ids = self._update_ports_for_instance( [ 770.684787] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 770.684787] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] with excutils.save_and_reraise_exception(): [ 770.684787] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 770.685219] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] self.force_reraise() [ 770.685219] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 770.685219] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] raise self.value [ 770.685219] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 770.685219] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] updated_port = self._update_port( [ 770.685219] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 770.685219] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] _ensure_no_port_binding_failure(port) [ 770.685219] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
770.685219] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] raise exception.PortBindingFailed(port_id=port['id']) [ 770.685219] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] nova.exception.PortBindingFailed: Binding failed for port 8e84f788-48f9-4e4e-9537-d543277d505e, please check neutron logs for more information. [ 770.685219] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] [ 770.685219] env[61629]: INFO nova.compute.manager [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Terminating instance [ 770.686176] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Acquiring lock "refresh_cache-853f3cd8-c874-45e8-9e89-ee897dea87a3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.686337] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Acquired lock "refresh_cache-853f3cd8-c874-45e8-9e89-ee897dea87a3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.686503] env[61629]: DEBUG nova.network.neutron [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 771.024028] env[61629]: DEBUG nova.network.neutron [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 771.116148] env[61629]: DEBUG nova.network.neutron [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.204616] env[61629]: DEBUG nova.network.neutron [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 771.259731] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01c56d3-e0de-424c-b992-2ed909e73746 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.267398] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ada31a-10a1-4e4c-8193-c37fe7a4d030 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.297868] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d0a23c8-f267-4d95-aac6-cdd1dcfed4e4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.305017] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25e6516-18e3-4e17-8764-984f235e5530 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.317754] env[61629]: DEBUG nova.compute.provider_tree [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 771.452412] env[61629]: DEBUG nova.network.neutron [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.494851] env[61629]: DEBUG nova.compute.manager [req-8fe7e12e-437e-418d-ad73-549fb3f36e2d req-329d1aa2-ac50-43d8-a184-8c2081590ad8 service nova] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Received event network-changed-8e84f788-48f9-4e4e-9537-d543277d505e {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 771.494851] env[61629]: DEBUG nova.compute.manager [req-8fe7e12e-437e-418d-ad73-549fb3f36e2d req-329d1aa2-ac50-43d8-a184-8c2081590ad8 service nova] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Refreshing instance network info cache due to event network-changed-8e84f788-48f9-4e4e-9537-d543277d505e. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 771.495270] env[61629]: DEBUG oslo_concurrency.lockutils [req-8fe7e12e-437e-418d-ad73-549fb3f36e2d req-329d1aa2-ac50-43d8-a184-8c2081590ad8 service nova] Acquiring lock "refresh_cache-853f3cd8-c874-45e8-9e89-ee897dea87a3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 771.618949] env[61629]: DEBUG oslo_concurrency.lockutils [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Releasing lock "refresh_cache-ad374170-21a1-4036-9804-b82493701abf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 771.619236] env[61629]: DEBUG nova.compute.manager [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 771.619420] env[61629]: DEBUG nova.compute.manager [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 771.619589] env[61629]: DEBUG nova.network.neutron [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 771.635395] env[61629]: DEBUG nova.network.neutron [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 771.820863] env[61629]: DEBUG nova.scheduler.client.report [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 771.955406] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Releasing lock "refresh_cache-853f3cd8-c874-45e8-9e89-ee897dea87a3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 771.955834] env[61629]: DEBUG nova.compute.manager [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 771.956103] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 771.956419] env[61629]: DEBUG oslo_concurrency.lockutils [req-8fe7e12e-437e-418d-ad73-549fb3f36e2d req-329d1aa2-ac50-43d8-a184-8c2081590ad8 service nova] Acquired lock "refresh_cache-853f3cd8-c874-45e8-9e89-ee897dea87a3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.956588] env[61629]: DEBUG nova.network.neutron [req-8fe7e12e-437e-418d-ad73-549fb3f36e2d req-329d1aa2-ac50-43d8-a184-8c2081590ad8 service nova] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Refreshing network info cache for port 8e84f788-48f9-4e4e-9537-d543277d505e {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 771.957619] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3b2b8078-2dc4-4ca0-b299-6f87b99fd377 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.967560] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-309ae1cd-c1c7-45f1-8ff1-6b551651329b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.988341] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 853f3cd8-c874-45e8-9e89-ee897dea87a3 could not be found. 
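Note: the WARNING above shows the destroy path tolerating an instance that never reached the hypervisor (spawn failed at port binding, so no VM was ever built). A minimal sketch of that pattern follows, with illustrative names rather than Nova's actual code, assuming the backend lookup raises a not-found error:

    # Hedged sketch: treat a missing backend VM as already destroyed and still
    # run network deallocation, mirroring the "Instance does not exist on
    # backend" -> "Instance destroyed" -> "Deallocating network" sequence above.
    class InstanceNotFound(Exception):
        pass

    def lookup_vm(instance_uuid):
        # Stand-in for the vCenter search; spawn failed before a VM existed,
        # so the lookup finds nothing.
        raise InstanceNotFound(instance_uuid)

    def destroy(instance_uuid):
        try:
            vm_ref = lookup_vm(instance_uuid)
            print("powering off and deleting", vm_ref)
        except InstanceNotFound:
            print("instance does not exist on backend; treating as destroyed")
        # Network cleanup runs regardless of whether a VM was found.
        print("deallocating network for", instance_uuid)

    destroy("853f3cd8-c874-45e8-9e89-ee897dea87a3")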
[ 771.988550] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 771.988728] env[61629]: INFO nova.compute.manager [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Took 0.03 seconds to destroy the instance on the hypervisor. [ 771.988997] env[61629]: DEBUG oslo.service.loopingcall [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 771.989254] env[61629]: DEBUG nova.compute.manager [-] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 771.989367] env[61629]: DEBUG nova.network.neutron [-] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 772.003671] env[61629]: DEBUG nova.network.neutron [-] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 772.138019] env[61629]: DEBUG nova.network.neutron [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.325427] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.826s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 772.326218] env[61629]: ERROR nova.compute.manager [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 12da692f-7fef-4759-89b1-a1e31061f346, please check neutron logs for more information. 
[ 772.326218] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Traceback (most recent call last): [ 772.326218] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 772.326218] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] self.driver.spawn(context, instance, image_meta, [ 772.326218] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 772.326218] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 772.326218] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 772.326218] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] vm_ref = self.build_virtual_machine(instance, [ 772.326218] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 772.326218] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] vif_infos = vmwarevif.get_vif_info(self._session, [ 772.326218] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 772.326618] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] for vif in network_info: [ 772.326618] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 772.326618] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] return self._sync_wrapper(fn, *args, **kwargs) [ 772.326618] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 772.326618] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] self.wait() [ 772.326618] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 772.326618] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] self[:] = self._gt.wait() [ 772.326618] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 772.326618] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] return self._exit_event.wait() [ 772.326618] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 772.326618] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] result = hub.switch() [ 772.326618] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
772.326618] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] return self.greenlet.switch() [ 772.327076] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 772.327076] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] result = function(*args, **kwargs) [ 772.327076] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 772.327076] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] return func(*args, **kwargs) [ 772.327076] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 772.327076] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] raise e [ 772.327076] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 772.327076] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] nwinfo = self.network_api.allocate_for_instance( [ 772.327076] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 772.327076] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] created_port_ids = self._update_ports_for_instance( [ 772.327076] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 772.327076] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] with excutils.save_and_reraise_exception(): [ 772.327076] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 772.327511] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] self.force_reraise() [ 772.327511] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 772.327511] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] raise self.value [ 772.327511] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 772.327511] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] updated_port = self._update_port( [ 772.327511] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 772.327511] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] _ensure_no_port_binding_failure(port) [ 772.327511] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 772.327511] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] raise exception.PortBindingFailed(port_id=port['id']) [ 772.327511] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] nova.exception.PortBindingFailed: Binding failed for port 12da692f-7fef-4759-89b1-a1e31061f346, please check neutron logs for more information. [ 772.327511] env[61629]: ERROR nova.compute.manager [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] [ 772.327885] env[61629]: DEBUG nova.compute.utils [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Binding failed for port 12da692f-7fef-4759-89b1-a1e31061f346, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 772.328633] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 16.086s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.328689] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 772.328816] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61629) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 772.329493] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.238s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.331200] env[61629]: INFO nova.compute.claims [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 772.334230] env[61629]: DEBUG nova.compute.manager [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Build of instance 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe was re-scheduled: Binding failed for port 12da692f-7fef-4759-89b1-a1e31061f346, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 772.334661] env[61629]: DEBUG nova.compute.manager [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 772.334898] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Acquiring lock "refresh_cache-079cb97b-b7d4-4f25-9f1d-f77f34a2efbe" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.335066] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Acquired lock "refresh_cache-079cb97b-b7d4-4f25-9f1d-f77f34a2efbe" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.335226] env[61629]: DEBUG nova.network.neutron [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 772.337576] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d914ca-3628-4a8d-a60b-31745a126ee2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.346315] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f172f9-a946-4d15-be10-6bb9230d21df {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.360711] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dbbe24a-f72b-4641-bf6d-6e72044abdec {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.368919] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcb43394-8b7d-4cd4-b81d-b60c9eeaf509 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.396328] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181509MB free_disk=151GB free_vcpus=48 pci_devices=None {{(pid=61629) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 772.396491] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.483361] env[61629]: DEBUG nova.network.neutron [req-8fe7e12e-437e-418d-ad73-549fb3f36e2d req-329d1aa2-ac50-43d8-a184-8c2081590ad8 service nova] [instance: 
853f3cd8-c874-45e8-9e89-ee897dea87a3] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 772.506660] env[61629]: DEBUG nova.network.neutron [-] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.563854] env[61629]: DEBUG nova.network.neutron [req-8fe7e12e-437e-418d-ad73-549fb3f36e2d req-329d1aa2-ac50-43d8-a184-8c2081590ad8 service nova] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.641204] env[61629]: INFO nova.compute.manager [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: ad374170-21a1-4036-9804-b82493701abf] Took 1.02 seconds to deallocate network for instance. [ 772.862008] env[61629]: DEBUG nova.network.neutron [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 772.942619] env[61629]: DEBUG nova.network.neutron [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.009065] env[61629]: INFO nova.compute.manager [-] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Took 1.02 seconds to deallocate network for instance. 
[ 773.011010] env[61629]: DEBUG nova.compute.claims [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 773.011204] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.066194] env[61629]: DEBUG oslo_concurrency.lockutils [req-8fe7e12e-437e-418d-ad73-549fb3f36e2d req-329d1aa2-ac50-43d8-a184-8c2081590ad8 service nova] Releasing lock "refresh_cache-853f3cd8-c874-45e8-9e89-ee897dea87a3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.066442] env[61629]: DEBUG nova.compute.manager [req-8fe7e12e-437e-418d-ad73-549fb3f36e2d req-329d1aa2-ac50-43d8-a184-8c2081590ad8 service nova] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Received event network-vif-deleted-8e84f788-48f9-4e4e-9537-d543277d505e {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 773.445731] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Releasing lock "refresh_cache-079cb97b-b7d4-4f25-9f1d-f77f34a2efbe" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.445731] env[61629]: DEBUG nova.compute.manager [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 773.445731] env[61629]: DEBUG nova.compute.manager [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 773.445731] env[61629]: DEBUG nova.network.neutron [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 773.462565] env[61629]: DEBUG nova.network.neutron [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 773.593171] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-336b8b0b-7ad2-4090-8c30-f719a7aa0af2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.600983] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe10ab2-67ff-44a6-8bdc-2aae6c45b8a3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.630128] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-457dce92-7075-4943-823f-517fd8a786ce {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.637509] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-301eadc3-69ab-42e2-b68b-179060faf8d6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.652976] env[61629]: DEBUG nova.compute.provider_tree [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 773.670213] env[61629]: INFO nova.scheduler.client.report [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Deleted allocations for instance ad374170-21a1-4036-9804-b82493701abf [ 773.965319] env[61629]: DEBUG nova.network.neutron [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.157092] env[61629]: DEBUG nova.scheduler.client.report [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 774.181721] env[61629]: DEBUG oslo_concurrency.lockutils [None req-34755297-1554-497e-bcf7-d7ea09c28cc3 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "ad374170-21a1-4036-9804-b82493701abf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 156.142s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.468058] env[61629]: INFO nova.compute.manager [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 
tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe] Took 1.02 seconds to deallocate network for instance. [ 774.663937] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.334s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.664501] env[61629]: DEBUG nova.compute.manager [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 774.667233] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.944s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.684452] env[61629]: DEBUG nova.compute.manager [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 775.174788] env[61629]: DEBUG nova.compute.utils [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 775.179365] env[61629]: DEBUG nova.compute.manager [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 775.179551] env[61629]: DEBUG nova.network.neutron [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 775.203539] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.236458] env[61629]: DEBUG nova.policy [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c62f9a7c8b5f4ef985880339407b46a1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0578ce75c37942d4ba6c8b862ceb7d92', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 775.425620] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa6cf15-686e-40c0-ab74-80c2218663b0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.434238] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d2eb54-a49f-420d-9796-a94f769cd7d8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.464098] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91650f4-eb7d-45cd-852c-2968d46150fe {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.470936] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570887c4-ffcc-4175-821a-03abebac2388 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.487291] env[61629]: DEBUG nova.compute.provider_tree [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 775.500592] env[61629]: INFO nova.scheduler.client.report [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Deleted allocations for instance 079cb97b-b7d4-4f25-9f1d-f77f34a2efbe [ 775.611753] env[61629]: DEBUG nova.network.neutron [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 
3b7866fb-213a-46a7-b31c-4ce5598591c4] Successfully created port: 2e13df7b-e819-486c-a526-1917db0db79a {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 775.680240] env[61629]: DEBUG nova.compute.manager [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 775.993349] env[61629]: DEBUG nova.scheduler.client.report [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 776.007876] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dba8385f-7df0-4325-804a-bf6ecd8b9c87 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Lock "079cb97b-b7d4-4f25-9f1d-f77f34a2efbe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 152.235s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.285971] env[61629]: DEBUG oslo_concurrency.lockutils [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquiring lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.285971] env[61629]: DEBUG oslo_concurrency.lockutils [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.499355] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.832s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.499961] env[61629]: ERROR nova.compute.manager [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e9d30088-009c-4567-a13b-b3bc5766fc05, please check neutron 
logs for more information. [ 776.499961] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Traceback (most recent call last): [ 776.499961] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 776.499961] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] self.driver.spawn(context, instance, image_meta, [ 776.499961] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 776.499961] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 776.499961] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 776.499961] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] vm_ref = self.build_virtual_machine(instance, [ 776.499961] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 776.499961] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] vif_infos = vmwarevif.get_vif_info(self._session, [ 776.499961] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 776.500323] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] for vif in network_info: [ 776.500323] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 776.500323] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] return self._sync_wrapper(fn, *args, **kwargs) [ 776.500323] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 776.500323] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] self.wait() [ 776.500323] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 776.500323] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] self[:] = self._gt.wait() [ 776.500323] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 776.500323] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] return self._exit_event.wait() [ 776.500323] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 776.500323] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] current.throw(*self._exc) [ 776.500323] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 776.500323] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] result = function(*args, **kwargs) [ 776.500739] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 776.500739] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] return func(*args, **kwargs) [ 776.500739] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 776.500739] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] raise e [ 776.500739] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 776.500739] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] nwinfo = self.network_api.allocate_for_instance( [ 776.500739] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 776.500739] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] created_port_ids = self._update_ports_for_instance( [ 776.500739] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 776.500739] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] with excutils.save_and_reraise_exception(): [ 776.500739] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 776.500739] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] self.force_reraise() [ 776.500739] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 776.501237] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] raise self.value [ 776.501237] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 776.501237] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] updated_port = self._update_port( [ 776.501237] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 776.501237] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] _ensure_no_port_binding_failure(port) [ 776.501237] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 776.501237] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] raise exception.PortBindingFailed(port_id=port['id']) [ 776.501237] env[61629]: ERROR nova.compute.manager 
[instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] nova.exception.PortBindingFailed: Binding failed for port e9d30088-009c-4567-a13b-b3bc5766fc05, please check neutron logs for more information. [ 776.501237] env[61629]: ERROR nova.compute.manager [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] [ 776.501237] env[61629]: DEBUG nova.compute.utils [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Binding failed for port e9d30088-009c-4567-a13b-b3bc5766fc05, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 776.501864] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.153s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.508017] env[61629]: DEBUG nova.compute.manager [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Build of instance 443ad254-3d5d-4fb8-a565-ce70c352e3f2 was re-scheduled: Binding failed for port e9d30088-009c-4567-a13b-b3bc5766fc05, please check neutron logs for more information. {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 776.508017] env[61629]: DEBUG nova.compute.manager [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 776.508017] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "refresh_cache-443ad254-3d5d-4fb8-a565-ce70c352e3f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.508017] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquired lock "refresh_cache-443ad254-3d5d-4fb8-a565-ce70c352e3f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.508238] env[61629]: DEBUG nova.network.neutron [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 776.510803] env[61629]: DEBUG nova.compute.manager [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Starting instance... 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 776.556405] env[61629]: DEBUG nova.compute.manager [req-cfc41490-a2a0-4505-ace5-3f5f20845b58 req-7e5473a4-7323-4f8f-8bd5-cfc5b8ed219c service nova] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Received event network-changed-2e13df7b-e819-486c-a526-1917db0db79a {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 776.558017] env[61629]: DEBUG nova.compute.manager [req-cfc41490-a2a0-4505-ace5-3f5f20845b58 req-7e5473a4-7323-4f8f-8bd5-cfc5b8ed219c service nova] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Refreshing instance network info cache due to event network-changed-2e13df7b-e819-486c-a526-1917db0db79a. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 776.558017] env[61629]: DEBUG oslo_concurrency.lockutils [req-cfc41490-a2a0-4505-ace5-3f5f20845b58 req-7e5473a4-7323-4f8f-8bd5-cfc5b8ed219c service nova] Acquiring lock "refresh_cache-3b7866fb-213a-46a7-b31c-4ce5598591c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.558017] env[61629]: DEBUG oslo_concurrency.lockutils [req-cfc41490-a2a0-4505-ace5-3f5f20845b58 req-7e5473a4-7323-4f8f-8bd5-cfc5b8ed219c service nova] Acquired lock "refresh_cache-3b7866fb-213a-46a7-b31c-4ce5598591c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.558017] env[61629]: DEBUG nova.network.neutron [req-cfc41490-a2a0-4505-ace5-3f5f20845b58 req-7e5473a4-7323-4f8f-8bd5-cfc5b8ed219c service nova] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Refreshing network info cache for port 2e13df7b-e819-486c-a526-1917db0db79a {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 776.695553] env[61629]: DEBUG nova.compute.manager [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 776.727129] env[61629]: DEBUG nova.virt.hardware [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 776.727638] env[61629]: DEBUG nova.virt.hardware [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 776.728031] env[61629]: DEBUG nova.virt.hardware [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 776.732015] env[61629]: DEBUG nova.virt.hardware [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 776.732015] env[61629]: DEBUG nova.virt.hardware [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 776.732015] env[61629]: DEBUG nova.virt.hardware [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 776.732015] env[61629]: DEBUG nova.virt.hardware [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 776.732015] env[61629]: DEBUG nova.virt.hardware [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 776.732015] env[61629]: DEBUG nova.virt.hardware [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 
tempest-ServersTestJSON-1460186850-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 776.732312] env[61629]: DEBUG nova.virt.hardware [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 776.732312] env[61629]: DEBUG nova.virt.hardware [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 776.732312] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eae04a4-9670-4943-beab-4326869d7b9e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.739274] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe33658-230e-42d2-9740-83b17eb3a6db {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.756928] env[61629]: ERROR nova.compute.manager [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2e13df7b-e819-486c-a526-1917db0db79a, please check neutron logs for more information. [ 776.756928] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 776.756928] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 776.756928] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 776.756928] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 776.756928] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 776.756928] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 776.756928] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 776.756928] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 776.756928] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 776.756928] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 776.756928] env[61629]: ERROR nova.compute.manager raise self.value [ 776.756928] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 776.756928] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 776.756928] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 776.756928] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 776.757525] env[61629]: ERROR nova.compute.manager File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 776.757525] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 776.757525] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2e13df7b-e819-486c-a526-1917db0db79a, please check neutron logs for more information. [ 776.757525] env[61629]: ERROR nova.compute.manager [ 776.757863] env[61629]: Traceback (most recent call last): [ 776.757959] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 776.757959] env[61629]: listener.cb(fileno) [ 776.758060] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 776.758060] env[61629]: result = function(*args, **kwargs) [ 776.758133] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 776.758133] env[61629]: return func(*args, **kwargs) [ 776.758213] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 776.758213] env[61629]: raise e [ 776.758282] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 776.758282] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 776.758348] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 776.758348] env[61629]: created_port_ids = self._update_ports_for_instance( [ 776.759022] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 776.759022] env[61629]: with excutils.save_and_reraise_exception(): [ 776.759022] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 776.759022] env[61629]: self.force_reraise() [ 776.759022] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 776.759022] env[61629]: raise self.value [ 776.759022] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 776.759022] env[61629]: updated_port = self._update_port( [ 776.759022] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 776.759022] env[61629]: _ensure_no_port_binding_failure(port) [ 776.759022] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 776.759022] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 776.759022] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 2e13df7b-e819-486c-a526-1917db0db79a, please check neutron logs for more information. [ 776.759022] env[61629]: Removing descriptor: 21 [ 776.761079] env[61629]: ERROR nova.compute.manager [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2e13df7b-e819-486c-a526-1917db0db79a, please check neutron logs for more information. 
[ 776.761079] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Traceback (most recent call last): [ 776.761079] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 776.761079] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] yield resources [ 776.761079] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 776.761079] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] self.driver.spawn(context, instance, image_meta, [ 776.761079] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 776.761079] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 776.761079] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 776.761079] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] vm_ref = self.build_virtual_machine(instance, [ 776.761079] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 776.761504] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] vif_infos = vmwarevif.get_vif_info(self._session, [ 776.761504] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 776.761504] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] for vif in network_info: [ 776.761504] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 776.761504] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] return self._sync_wrapper(fn, *args, **kwargs) [ 776.761504] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 776.761504] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] self.wait() [ 776.761504] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 776.761504] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] self[:] = self._gt.wait() [ 776.761504] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 776.761504] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] return self._exit_event.wait() [ 776.761504] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 776.761504] env[61629]: ERROR 
nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] result = hub.switch() [ 776.762012] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 776.762012] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] return self.greenlet.switch() [ 776.762012] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 776.762012] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] result = function(*args, **kwargs) [ 776.762012] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 776.762012] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] return func(*args, **kwargs) [ 776.762012] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 776.762012] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] raise e [ 776.762012] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 776.762012] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] nwinfo = self.network_api.allocate_for_instance( [ 776.762012] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 776.762012] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] created_port_ids = self._update_ports_for_instance( [ 776.762012] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 776.762445] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] with excutils.save_and_reraise_exception(): [ 776.762445] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 776.762445] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] self.force_reraise() [ 776.762445] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 776.762445] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] raise self.value [ 776.762445] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 776.762445] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] updated_port = self._update_port( [ 776.762445] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 776.762445] 
env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] _ensure_no_port_binding_failure(port) [ 776.762445] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 776.762445] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] raise exception.PortBindingFailed(port_id=port['id']) [ 776.762445] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] nova.exception.PortBindingFailed: Binding failed for port 2e13df7b-e819-486c-a526-1917db0db79a, please check neutron logs for more information. [ 776.762445] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] [ 776.762886] env[61629]: INFO nova.compute.manager [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Terminating instance [ 776.762886] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "refresh_cache-3b7866fb-213a-46a7-b31c-4ce5598591c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.031574] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.035141] env[61629]: DEBUG nova.network.neutron [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 777.078163] env[61629]: DEBUG nova.network.neutron [req-cfc41490-a2a0-4505-ace5-3f5f20845b58 req-7e5473a4-7323-4f8f-8bd5-cfc5b8ed219c service nova] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 777.138390] env[61629]: DEBUG nova.network.neutron [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.186380] env[61629]: DEBUG nova.network.neutron [req-cfc41490-a2a0-4505-ace5-3f5f20845b58 req-7e5473a4-7323-4f8f-8bd5-cfc5b8ed219c service nova] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.283959] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e8ae51-49f4-4c2e-9777-b95b99f806b8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.291373] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59199a0b-c272-4a31-ba6a-683129d3c39e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.320180] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d4d907-62aa-4d4a-aff0-24e3f532535c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.327698] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a690f6-e677-4743-bb93-7227dcfae803 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.341024] env[61629]: DEBUG nova.compute.provider_tree [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 777.641667] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Releasing lock "refresh_cache-443ad254-3d5d-4fb8-a565-ce70c352e3f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.641973] env[61629]: DEBUG nova.compute.manager [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 777.642202] env[61629]: DEBUG nova.compute.manager [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 777.642374] env[61629]: DEBUG nova.network.neutron [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 777.657192] env[61629]: DEBUG nova.network.neutron [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 777.692316] env[61629]: DEBUG oslo_concurrency.lockutils [req-cfc41490-a2a0-4505-ace5-3f5f20845b58 req-7e5473a4-7323-4f8f-8bd5-cfc5b8ed219c service nova] Releasing lock "refresh_cache-3b7866fb-213a-46a7-b31c-4ce5598591c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.692897] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired lock "refresh_cache-3b7866fb-213a-46a7-b31c-4ce5598591c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.693955] env[61629]: DEBUG nova.network.neutron [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 777.845684] env[61629]: DEBUG nova.scheduler.client.report [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 778.159380] env[61629]: DEBUG nova.network.neutron [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.210230] env[61629]: DEBUG nova.network.neutron [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 
tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 778.300636] env[61629]: DEBUG nova.network.neutron [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.349723] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.848s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.350374] env[61629]: ERROR nova.compute.manager [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 35513a37-8c02-451a-85d7-dc0055751458, please check neutron logs for more information. [ 778.350374] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Traceback (most recent call last): [ 778.350374] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 778.350374] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] self.driver.spawn(context, instance, image_meta, [ 778.350374] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 778.350374] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] self._vmops.spawn(context, instance, image_meta, injected_files, [ 778.350374] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 778.350374] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] vm_ref = self.build_virtual_machine(instance, [ 778.350374] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 778.350374] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] vif_infos = vmwarevif.get_vif_info(self._session, [ 778.350374] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 778.350787] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] for vif in network_info: [ 778.350787] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 778.350787] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] return 
self._sync_wrapper(fn, *args, **kwargs) [ 778.350787] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 778.350787] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] self.wait() [ 778.350787] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 778.350787] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] self[:] = self._gt.wait() [ 778.350787] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 778.350787] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] return self._exit_event.wait() [ 778.350787] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 778.350787] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] current.throw(*self._exc) [ 778.350787] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 778.350787] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] result = function(*args, **kwargs) [ 778.351290] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 778.351290] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] return func(*args, **kwargs) [ 778.351290] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 778.351290] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] raise e [ 778.351290] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 778.351290] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] nwinfo = self.network_api.allocate_for_instance( [ 778.351290] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 778.351290] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] created_port_ids = self._update_ports_for_instance( [ 778.351290] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 778.351290] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] with excutils.save_and_reraise_exception(): [ 778.351290] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 778.351290] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] 
self.force_reraise() [ 778.351290] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 778.351754] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] raise self.value [ 778.351754] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 778.351754] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] updated_port = self._update_port( [ 778.351754] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 778.351754] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] _ensure_no_port_binding_failure(port) [ 778.351754] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 778.351754] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] raise exception.PortBindingFailed(port_id=port['id']) [ 778.351754] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] nova.exception.PortBindingFailed: Binding failed for port 35513a37-8c02-451a-85d7-dc0055751458, please check neutron logs for more information. [ 778.351754] env[61629]: ERROR nova.compute.manager [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] [ 778.351754] env[61629]: DEBUG nova.compute.utils [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Binding failed for port 35513a37-8c02-451a-85d7-dc0055751458, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 778.352229] env[61629]: DEBUG oslo_concurrency.lockutils [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.660s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.353645] env[61629]: INFO nova.compute.claims [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 778.356975] env[61629]: DEBUG nova.compute.manager [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Build of instance d013c1e1-952a-4b76-a44d-8499f5159c42 was re-scheduled: Binding failed for port 35513a37-8c02-451a-85d7-dc0055751458, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 778.357404] env[61629]: DEBUG nova.compute.manager [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 778.357629] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Acquiring lock "refresh_cache-d013c1e1-952a-4b76-a44d-8499f5159c42" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.357778] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Acquired lock "refresh_cache-d013c1e1-952a-4b76-a44d-8499f5159c42" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.357937] env[61629]: DEBUG nova.network.neutron [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 778.545934] env[61629]: DEBUG oslo_concurrency.lockutils [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Acquiring lock "68c1e93a-2829-4764-a900-75c3479b4715" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.546196] env[61629]: DEBUG oslo_concurrency.lockutils [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Lock "68c1e93a-2829-4764-a900-75c3479b4715" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.580060] env[61629]: DEBUG nova.compute.manager [req-6a92e535-2990-4508-92c3-6d72f26531f9 req-473fc852-d825-43e2-aa5e-bc9d3c612b0d service nova] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Received event network-vif-deleted-2e13df7b-e819-486c-a526-1917db0db79a {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 778.664029] env[61629]: INFO nova.compute.manager [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 443ad254-3d5d-4fb8-a565-ce70c352e3f2] Took 1.02 seconds to deallocate network for instance. 
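Editor's note on the failure pattern above: every PortBindingFailed traceback in this log bottoms out in the same check. Neutron returns the port with binding:vif_type set to "binding_failed", Nova raises PortBindingFailed for it, the claim is aborted, and the build is re-scheduled, which is exactly the "was re-scheduled" / "Deallocating network for instance" sequence in the surrounding records. The following is a minimal, self-contained sketch of that check for illustration only; it is not the real nova.network.neutron module, and the stand-in class, function name, and sample port dict are assumptions (the port id is simply copied from the log above).

    VIF_TYPE_BINDING_FAILED = "binding_failed"  # value Neutron reports for a failed binding


    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)


    def ensure_no_port_binding_failure(port):
        """Raise if Neutron reported that it could not bind the port."""
        if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port["id"])


    if __name__ == "__main__":
        # Sample data only; the id is reused from the log entries above.
        port = {"id": "35513a37-8c02-451a-85d7-dc0055751458",
                "binding:vif_type": VIF_TYPE_BINDING_FAILED}
        try:
            ensure_no_port_binding_failure(port)
        except PortBindingFailed as exc:
            print(exc)  # same wording as the ERROR lines logged above

When such an exception escapes the build path, the compute manager releases the resource claim and re-schedules the instance, so a binding failure shows up here as an ERROR followed by cleanup DEBUG records rather than as a terminal instance state.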
[ 778.803259] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Releasing lock "refresh_cache-3b7866fb-213a-46a7-b31c-4ce5598591c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 778.803589] env[61629]: DEBUG nova.compute.manager [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 778.803821] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 778.804129] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-43d4cf82-08d5-4d77-98c7-9f3a07cac8cf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.813258] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca4c340c-d071-4071-b66d-535fc2cba9dd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.834994] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3b7866fb-213a-46a7-b31c-4ce5598591c4 could not be found. [ 778.835224] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 778.835447] env[61629]: INFO nova.compute.manager [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Took 0.03 seconds to destroy the instance on the hypervisor. [ 778.835648] env[61629]: DEBUG oslo.service.loopingcall [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 778.835934] env[61629]: DEBUG nova.compute.manager [-] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 778.835934] env[61629]: DEBUG nova.network.neutron [-] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 778.859771] env[61629]: DEBUG nova.network.neutron [-] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 778.874891] env[61629]: DEBUG nova.network.neutron [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 778.929899] env[61629]: DEBUG nova.network.neutron [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.363537] env[61629]: DEBUG nova.network.neutron [-] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.432083] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Releasing lock "refresh_cache-d013c1e1-952a-4b76-a44d-8499f5159c42" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.432308] env[61629]: DEBUG nova.compute.manager [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 779.432477] env[61629]: DEBUG nova.compute.manager [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 779.432640] env[61629]: DEBUG nova.network.neutron [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 779.447276] env[61629]: DEBUG nova.network.neutron [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 779.598930] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8162b72e-ca32-4ab1-b905-0d7cdee66bab {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.606465] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041944d8-c7fb-48a8-8414-bc0a76352e43 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.635402] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66acf4c8-9066-4cf0-8a6f-fbc82f5b502d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.642400] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830d77cc-a980-45c0-8c1b-1252bf335846 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.655253] env[61629]: DEBUG nova.compute.provider_tree [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.690292] env[61629]: INFO nova.scheduler.client.report [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Deleted allocations for instance 443ad254-3d5d-4fb8-a565-ce70c352e3f2 [ 779.868801] env[61629]: INFO nova.compute.manager [-] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Took 1.03 seconds to deallocate network for instance. 
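Editor's note on the lock records above: the "Acquiring lock ... / acquired ... waited N.NNNs / released ... held N.NNNs" DEBUG lines around the resource-tracker calls are emitted by oslo.concurrency's lock helpers (the lockutils.py inner wrapper referenced in the records). The snippet below is a hedged sketch of that usage pattern, assuming oslo.concurrency is installed; the lock name and function name are reused from the log purely for illustration and are not Nova code.

    import logging
    import time

    from oslo_concurrency import lockutils

    # Make the lockutils DEBUG messages visible on stderr.
    logging.basicConfig(level=logging.DEBUG)


    @lockutils.synchronized("compute_resources")
    def abort_instance_claim():
        # Critical section: only one caller at a time may touch the tracked
        # resource usage. A long hold here is what appears as a large
        # "waited N.NNNs" value on the next caller's acquire record.
        time.sleep(0.1)


    if __name__ == "__main__":
        abort_instance_claim()

The long waits visible in this log (for example a claim waiting over 14 seconds for "compute_resources") are therefore contention on that single lock while earlier claims or aborts are still being processed, not a hang in the resource tracker itself.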
[ 779.871336] env[61629]: DEBUG nova.compute.claims [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 779.871517] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.950329] env[61629]: DEBUG nova.network.neutron [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.158737] env[61629]: DEBUG nova.scheduler.client.report [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 780.198226] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fe8dfd4-c8d5-45ab-914c-6ceff964b8f4 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "443ad254-3d5d-4fb8-a565-ce70c352e3f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 155.489s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.452859] env[61629]: INFO nova.compute.manager [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: d013c1e1-952a-4b76-a44d-8499f5159c42] Took 1.02 seconds to deallocate network for instance. [ 780.664415] env[61629]: DEBUG oslo_concurrency.lockutils [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.312s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.664967] env[61629]: DEBUG nova.compute.manager [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 780.667518] env[61629]: DEBUG oslo_concurrency.lockutils [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.107s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.701535] env[61629]: DEBUG nova.compute.manager [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 781.172534] env[61629]: DEBUG nova.compute.utils [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 781.178245] env[61629]: DEBUG nova.compute.manager [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 781.178455] env[61629]: DEBUG nova.network.neutron [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 781.221745] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.226508] env[61629]: DEBUG nova.policy [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5aa41ffdc34047e99044d38f5e7ac493', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a215b62f5b8440559fb3861e966c9535', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 781.487244] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5365a7cd-0b85-4a01-94f7-323b984baa22 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.490489] env[61629]: INFO nova.scheduler.client.report [None 
req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Deleted allocations for instance d013c1e1-952a-4b76-a44d-8499f5159c42 [ 781.501321] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a0dbfb5-c37d-4d5d-a0ca-668678543755 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.538718] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11fbe7ce-4661-4bdf-bc3a-253b443eebea {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.544616] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f2f5b2-7734-4bc5-b8f0-d512a18cf0e9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.549544] env[61629]: DEBUG nova.network.neutron [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Successfully created port: ed92b04d-8f51-4520-ac29-2149f7a8fdab {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 781.561906] env[61629]: DEBUG nova.compute.provider_tree [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.681843] env[61629]: DEBUG nova.compute.manager [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 781.998843] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bafec945-931a-420b-a3f0-14a2c3f3d7fc tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Lock "d013c1e1-952a-4b76-a44d-8499f5159c42" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 146.647s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.067852] env[61629]: DEBUG nova.scheduler.client.report [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 782.502469] env[61629]: DEBUG nova.compute.manager [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 782.571766] env[61629]: DEBUG oslo_concurrency.lockutils [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.904s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.572560] env[61629]: ERROR nova.compute.manager [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 939df38e-b1dd-4f13-afa2-3a0385b51db1, please check neutron logs for more information. 
[ 782.572560] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] Traceback (most recent call last): [ 782.572560] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 782.572560] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] self.driver.spawn(context, instance, image_meta, [ 782.572560] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 782.572560] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] self._vmops.spawn(context, instance, image_meta, injected_files, [ 782.572560] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 782.572560] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] vm_ref = self.build_virtual_machine(instance, [ 782.572560] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 782.572560] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] vif_infos = vmwarevif.get_vif_info(self._session, [ 782.572560] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 782.572844] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] for vif in network_info: [ 782.572844] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 782.572844] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] return self._sync_wrapper(fn, *args, **kwargs) [ 782.572844] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 782.572844] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] self.wait() [ 782.572844] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 782.572844] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] self[:] = self._gt.wait() [ 782.572844] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 782.572844] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] return self._exit_event.wait() [ 782.572844] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 782.572844] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] result = hub.switch() [ 782.572844] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
782.572844] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] return self.greenlet.switch() [ 782.573108] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 782.573108] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] result = function(*args, **kwargs) [ 782.573108] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 782.573108] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] return func(*args, **kwargs) [ 782.573108] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 782.573108] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] raise e [ 782.573108] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 782.573108] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] nwinfo = self.network_api.allocate_for_instance( [ 782.573108] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 782.573108] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] created_port_ids = self._update_ports_for_instance( [ 782.573108] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 782.573108] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] with excutils.save_and_reraise_exception(): [ 782.573108] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 782.573352] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] self.force_reraise() [ 782.573352] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 782.573352] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] raise self.value [ 782.573352] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 782.573352] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] updated_port = self._update_port( [ 782.573352] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 782.573352] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] _ensure_no_port_binding_failure(port) [ 782.573352] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 782.573352] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] raise exception.PortBindingFailed(port_id=port['id']) [ 782.573352] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] nova.exception.PortBindingFailed: Binding failed for port 939df38e-b1dd-4f13-afa2-3a0385b51db1, please check neutron logs for more information. [ 782.573352] env[61629]: ERROR nova.compute.manager [instance: f5830e36-257a-418a-add6-01195bb7d103] [ 782.573612] env[61629]: DEBUG nova.compute.utils [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Binding failed for port 939df38e-b1dd-4f13-afa2-3a0385b51db1, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 782.574874] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.608s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.579429] env[61629]: DEBUG nova.compute.manager [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Build of instance f5830e36-257a-418a-add6-01195bb7d103 was re-scheduled: Binding failed for port 939df38e-b1dd-4f13-afa2-3a0385b51db1, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 782.580069] env[61629]: DEBUG nova.compute.manager [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 782.580385] env[61629]: DEBUG oslo_concurrency.lockutils [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Acquiring lock "refresh_cache-f5830e36-257a-418a-add6-01195bb7d103" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 782.580552] env[61629]: DEBUG oslo_concurrency.lockutils [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Acquired lock "refresh_cache-f5830e36-257a-418a-add6-01195bb7d103" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.580750] env[61629]: DEBUG nova.network.neutron [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 782.635781] env[61629]: DEBUG nova.compute.manager [req-cbe6b4a0-e8a9-44de-8cb8-922ff7bf61ab req-0b19e840-bb53-4d51-ab97-7c1b85206a8e service nova] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Received event network-changed-ed92b04d-8f51-4520-ac29-2149f7a8fdab {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 782.635968] env[61629]: DEBUG nova.compute.manager [req-cbe6b4a0-e8a9-44de-8cb8-922ff7bf61ab req-0b19e840-bb53-4d51-ab97-7c1b85206a8e service nova] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Refreshing instance network info cache due to event network-changed-ed92b04d-8f51-4520-ac29-2149f7a8fdab. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 782.636196] env[61629]: DEBUG oslo_concurrency.lockutils [req-cbe6b4a0-e8a9-44de-8cb8-922ff7bf61ab req-0b19e840-bb53-4d51-ab97-7c1b85206a8e service nova] Acquiring lock "refresh_cache-c3724b2e-4f6b-4db5-a68f-41e410e561e9" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 782.636333] env[61629]: DEBUG oslo_concurrency.lockutils [req-cbe6b4a0-e8a9-44de-8cb8-922ff7bf61ab req-0b19e840-bb53-4d51-ab97-7c1b85206a8e service nova] Acquired lock "refresh_cache-c3724b2e-4f6b-4db5-a68f-41e410e561e9" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.636482] env[61629]: DEBUG nova.network.neutron [req-cbe6b4a0-e8a9-44de-8cb8-922ff7bf61ab req-0b19e840-bb53-4d51-ab97-7c1b85206a8e service nova] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Refreshing network info cache for port ed92b04d-8f51-4520-ac29-2149f7a8fdab {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 782.694782] env[61629]: DEBUG nova.compute.manager [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 782.719801] env[61629]: DEBUG nova.virt.hardware [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 782.720053] env[61629]: DEBUG nova.virt.hardware [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 782.720212] env[61629]: DEBUG nova.virt.hardware [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 782.720460] env[61629]: DEBUG nova.virt.hardware [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Flavor pref 0:0:0 
{{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 782.720520] env[61629]: DEBUG nova.virt.hardware [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 782.720668] env[61629]: DEBUG nova.virt.hardware [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 782.720871] env[61629]: DEBUG nova.virt.hardware [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 782.721046] env[61629]: DEBUG nova.virt.hardware [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 782.721221] env[61629]: DEBUG nova.virt.hardware [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 782.721383] env[61629]: DEBUG nova.virt.hardware [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 782.721549] env[61629]: DEBUG nova.virt.hardware [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 782.722448] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b7abb9-65a4-401c-9ba9-07d07c049a86 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.730219] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6230e08-040e-4665-a936-8d8fad0a8b3e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.865506] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Acquiring lock 
"08cb71f4-2ebe-4694-856c-2e772f319cdf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 782.865803] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Lock "08cb71f4-2ebe-4694-856c-2e772f319cdf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.905229] env[61629]: ERROR nova.compute.manager [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ed92b04d-8f51-4520-ac29-2149f7a8fdab, please check neutron logs for more information. [ 782.905229] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 782.905229] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 782.905229] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 782.905229] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 782.905229] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 782.905229] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 782.905229] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 782.905229] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 782.905229] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 782.905229] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 782.905229] env[61629]: ERROR nova.compute.manager raise self.value [ 782.905229] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 782.905229] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 782.905229] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 782.905229] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 782.905917] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 782.905917] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 782.905917] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ed92b04d-8f51-4520-ac29-2149f7a8fdab, please check neutron logs for more information. 
[ 782.905917] env[61629]: ERROR nova.compute.manager [ 782.905917] env[61629]: Traceback (most recent call last): [ 782.905917] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 782.905917] env[61629]: listener.cb(fileno) [ 782.905917] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 782.905917] env[61629]: result = function(*args, **kwargs) [ 782.905917] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 782.905917] env[61629]: return func(*args, **kwargs) [ 782.905917] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 782.905917] env[61629]: raise e [ 782.905917] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 782.905917] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 782.905917] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 782.905917] env[61629]: created_port_ids = self._update_ports_for_instance( [ 782.905917] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 782.905917] env[61629]: with excutils.save_and_reraise_exception(): [ 782.905917] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 782.905917] env[61629]: self.force_reraise() [ 782.905917] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 782.905917] env[61629]: raise self.value [ 782.905917] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 782.905917] env[61629]: updated_port = self._update_port( [ 782.905917] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 782.905917] env[61629]: _ensure_no_port_binding_failure(port) [ 782.905917] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 782.905917] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 782.906935] env[61629]: nova.exception.PortBindingFailed: Binding failed for port ed92b04d-8f51-4520-ac29-2149f7a8fdab, please check neutron logs for more information. [ 782.906935] env[61629]: Removing descriptor: 15 [ 782.906935] env[61629]: ERROR nova.compute.manager [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ed92b04d-8f51-4520-ac29-2149f7a8fdab, please check neutron logs for more information. 
[ 782.906935] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Traceback (most recent call last): [ 782.906935] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 782.906935] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] yield resources [ 782.906935] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 782.906935] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] self.driver.spawn(context, instance, image_meta, [ 782.906935] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 782.906935] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 782.906935] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 782.906935] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] vm_ref = self.build_virtual_machine(instance, [ 782.907394] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 782.907394] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] vif_infos = vmwarevif.get_vif_info(self._session, [ 782.907394] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 782.907394] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] for vif in network_info: [ 782.907394] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 782.907394] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] return self._sync_wrapper(fn, *args, **kwargs) [ 782.907394] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 782.907394] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] self.wait() [ 782.907394] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 782.907394] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] self[:] = self._gt.wait() [ 782.907394] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 782.907394] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] return self._exit_event.wait() [ 782.907394] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 782.908133] env[61629]: ERROR 
nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] result = hub.switch() [ 782.908133] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 782.908133] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] return self.greenlet.switch() [ 782.908133] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 782.908133] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] result = function(*args, **kwargs) [ 782.908133] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 782.908133] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] return func(*args, **kwargs) [ 782.908133] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 782.908133] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] raise e [ 782.908133] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 782.908133] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] nwinfo = self.network_api.allocate_for_instance( [ 782.908133] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 782.908133] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] created_port_ids = self._update_ports_for_instance( [ 782.908415] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 782.908415] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] with excutils.save_and_reraise_exception(): [ 782.908415] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 782.908415] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] self.force_reraise() [ 782.908415] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 782.908415] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] raise self.value [ 782.908415] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 782.908415] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] updated_port = self._update_port( [ 782.908415] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 782.908415] 
env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] _ensure_no_port_binding_failure(port) [ 782.908415] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 782.908415] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] raise exception.PortBindingFailed(port_id=port['id']) [ 782.908642] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] nova.exception.PortBindingFailed: Binding failed for port ed92b04d-8f51-4520-ac29-2149f7a8fdab, please check neutron logs for more information. [ 782.908642] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] [ 782.908642] env[61629]: INFO nova.compute.manager [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Terminating instance [ 782.908642] env[61629]: DEBUG oslo_concurrency.lockutils [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Acquiring lock "refresh_cache-c3724b2e-4f6b-4db5-a68f-41e410e561e9" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 783.025286] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.102193] env[61629]: DEBUG nova.network.neutron [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 783.140988] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "09890839-b1d9-4558-992d-b1a6f4c5f750" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.141239] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "09890839-b1d9-4558-992d-b1a6f4c5f750" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.156570] env[61629]: DEBUG nova.network.neutron [req-cbe6b4a0-e8a9-44de-8cb8-922ff7bf61ab req-0b19e840-bb53-4d51-ab97-7c1b85206a8e service nova] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 783.168090] env[61629]: DEBUG nova.network.neutron [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.225572] env[61629]: DEBUG nova.network.neutron [req-cbe6b4a0-e8a9-44de-8cb8-922ff7bf61ab req-0b19e840-bb53-4d51-ab97-7c1b85206a8e service nova] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.321053] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bed214f-0684-492a-8571-6328401cd812 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.328554] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a4db45-1da2-4a2a-9f2b-d2a8c5ed0613 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.357244] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6a9265-09c7-4d0b-a108-e0a226ea5043 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.364069] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2277760-f033-4c82-8c85-2bb20a554bd7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.376913] env[61629]: DEBUG nova.compute.provider_tree [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 783.670142] env[61629]: DEBUG oslo_concurrency.lockutils [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Releasing lock "refresh_cache-f5830e36-257a-418a-add6-01195bb7d103" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 783.670457] env[61629]: DEBUG nova.compute.manager [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 783.670629] env[61629]: DEBUG nova.compute.manager [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 783.670838] env[61629]: DEBUG nova.network.neutron [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 783.685742] env[61629]: DEBUG nova.network.neutron [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 783.727977] env[61629]: DEBUG oslo_concurrency.lockutils [req-cbe6b4a0-e8a9-44de-8cb8-922ff7bf61ab req-0b19e840-bb53-4d51-ab97-7c1b85206a8e service nova] Releasing lock "refresh_cache-c3724b2e-4f6b-4db5-a68f-41e410e561e9" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 783.728353] env[61629]: DEBUG oslo_concurrency.lockutils [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Acquired lock "refresh_cache-c3724b2e-4f6b-4db5-a68f-41e410e561e9" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.728532] env[61629]: DEBUG nova.network.neutron [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 783.879796] env[61629]: DEBUG nova.scheduler.client.report [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 784.188716] env[61629]: DEBUG nova.network.neutron [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.244511] env[61629]: DEBUG nova.network.neutron [None 
req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 784.314609] env[61629]: DEBUG nova.network.neutron [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.385570] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.811s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.386222] env[61629]: ERROR nova.compute.manager [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5f5e81fd-f5d2-4652-a23f-3e4a7ca1d8aa, please check neutron logs for more information. [ 784.386222] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Traceback (most recent call last): [ 784.386222] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 784.386222] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] self.driver.spawn(context, instance, image_meta, [ 784.386222] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 784.386222] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 784.386222] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 784.386222] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] vm_ref = self.build_virtual_machine(instance, [ 784.386222] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 784.386222] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] vif_infos = vmwarevif.get_vif_info(self._session, [ 784.386222] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 784.386501] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] for vif in network_info: [ 784.386501] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File 
"/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 784.386501] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] return self._sync_wrapper(fn, *args, **kwargs) [ 784.386501] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 784.386501] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] self.wait() [ 784.386501] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 784.386501] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] self[:] = self._gt.wait() [ 784.386501] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 784.386501] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] return self._exit_event.wait() [ 784.386501] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 784.386501] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] current.throw(*self._exc) [ 784.386501] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 784.386501] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] result = function(*args, **kwargs) [ 784.386796] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 784.386796] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] return func(*args, **kwargs) [ 784.386796] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 784.386796] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] raise e [ 784.386796] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 784.386796] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] nwinfo = self.network_api.allocate_for_instance( [ 784.386796] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 784.386796] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] created_port_ids = self._update_ports_for_instance( [ 784.386796] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 784.386796] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] with excutils.save_and_reraise_exception(): [ 784.386796] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 784.386796] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] self.force_reraise() [ 784.386796] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 784.387106] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] raise self.value [ 784.387106] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 784.387106] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] updated_port = self._update_port( [ 784.387106] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 784.387106] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] _ensure_no_port_binding_failure(port) [ 784.387106] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 784.387106] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] raise exception.PortBindingFailed(port_id=port['id']) [ 784.387106] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] nova.exception.PortBindingFailed: Binding failed for port 5f5e81fd-f5d2-4652-a23f-3e4a7ca1d8aa, please check neutron logs for more information. [ 784.387106] env[61629]: ERROR nova.compute.manager [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] [ 784.387106] env[61629]: DEBUG nova.compute.utils [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Binding failed for port 5f5e81fd-f5d2-4652-a23f-3e4a7ca1d8aa, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 784.388172] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.392s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.390918] env[61629]: DEBUG nova.compute.manager [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Build of instance 52816a66-442f-4869-aee3-0cebd6f5e9bf was re-scheduled: Binding failed for port 5f5e81fd-f5d2-4652-a23f-3e4a7ca1d8aa, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 784.391331] env[61629]: DEBUG nova.compute.manager [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 784.391548] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Acquiring lock "refresh_cache-52816a66-442f-4869-aee3-0cebd6f5e9bf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 784.391716] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Acquired lock "refresh_cache-52816a66-442f-4869-aee3-0cebd6f5e9bf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.391836] env[61629]: DEBUG nova.network.neutron [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 784.691755] env[61629]: INFO nova.compute.manager [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: f5830e36-257a-418a-add6-01195bb7d103] Took 1.02 seconds to deallocate network for instance. [ 784.735668] env[61629]: DEBUG nova.compute.manager [req-5be1ddf7-0c48-4d8f-ade6-aad328c8a20f req-00263154-6db2-44a9-beed-ec301969ea98 service nova] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Received event network-vif-deleted-ed92b04d-8f51-4520-ac29-2149f7a8fdab {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 784.817334] env[61629]: DEBUG oslo_concurrency.lockutils [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Releasing lock "refresh_cache-c3724b2e-4f6b-4db5-a68f-41e410e561e9" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 784.817799] env[61629]: DEBUG nova.compute.manager [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 784.817990] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 784.818319] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-46f1d1c9-182e-4cf7-89bf-ac3e338b5e14 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.827592] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b07b83c-4501-48ad-b470-5dad8b8bb68d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.848678] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c3724b2e-4f6b-4db5-a68f-41e410e561e9 could not be found. [ 784.848896] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 784.849096] env[61629]: INFO nova.compute.manager [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Took 0.03 seconds to destroy the instance on the hypervisor. [ 784.849352] env[61629]: DEBUG oslo.service.loopingcall [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 784.849580] env[61629]: DEBUG nova.compute.manager [-] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 784.849674] env[61629]: DEBUG nova.network.neutron [-] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 784.915086] env[61629]: DEBUG nova.network.neutron [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 784.938944] env[61629]: DEBUG nova.network.neutron [-] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 784.985224] env[61629]: DEBUG nova.network.neutron [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.164570] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0332b4a9-9346-4d63-99ba-d22cfcb99a0f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.171301] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50954dbe-3cd9-4046-803f-79fbe0685c6f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.204064] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9516952d-5ed9-44e5-81f0-60bdc645d93f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.211807] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f097bd1-f162-4de2-af88-58c504a8a4ff {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.896621] env[61629]: DEBUG nova.network.neutron [-] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.900750] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Releasing lock "refresh_cache-52816a66-442f-4869-aee3-0cebd6f5e9bf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 785.900750] env[61629]: DEBUG nova.compute.manager [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 785.900750] env[61629]: DEBUG nova.compute.manager [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 785.900750] env[61629]: DEBUG nova.network.neutron [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 785.914690] env[61629]: DEBUG nova.compute.provider_tree [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 785.918024] env[61629]: INFO nova.scheduler.client.report [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Deleted allocations for instance f5830e36-257a-418a-add6-01195bb7d103 [ 785.923473] env[61629]: DEBUG nova.network.neutron [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 786.402950] env[61629]: INFO nova.compute.manager [-] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Took 1.55 seconds to deallocate network for instance. 
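The deallocation just completed was driven through oslo.service's looping-call machinery ("Waiting for function ... _deallocate_network_with_retries to return"). The exact looping-call variant Nova uses is not visible in the log, so the sketch below uses FixedIntervalLoopingCall purely to illustrate the general retry pattern; the function names are hypothetical and the snippet assumes oslo.service (and its eventlet dependency) is installed.

    from oslo_service import loopingcall

    attempts = {'n': 0}

    def pretend_deallocate():
        # Hypothetical stand-in: fail once, then succeed.
        if attempts['n'] < 2:
            raise RuntimeError("transient failure")

    def _deallocate_with_retries():
        attempts['n'] += 1
        try:
            pretend_deallocate()
        except Exception:
            if attempts['n'] >= 3:
                raise                      # give up after a few tries
            return                         # loop again after the interval
        raise loopingcall.LoopingCallDone()  # success: stop looping

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    timer.start(interval=1).wait()         # blocks until LoopingCallDone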
[ 786.405956] env[61629]: DEBUG nova.compute.claims [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 786.406195] env[61629]: DEBUG oslo_concurrency.lockutils [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.417545] env[61629]: DEBUG nova.scheduler.client.report [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 786.427358] env[61629]: DEBUG oslo_concurrency.lockutils [None req-415d7b48-5c48-4267-b413-34e87fb7004d tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Lock "f5830e36-257a-418a-add6-01195bb7d103" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 149.708s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.427578] env[61629]: DEBUG nova.network.neutron [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.922737] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.534s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.923405] env[61629]: ERROR nova.compute.manager [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 50771be9-1b74-455d-823d-98060158af48, please check neutron logs for more information. 
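The inventory reported in this record translates into schedulable capacity via placement's usual formula, capacity = (total - reserved) * allocation_ratio, so this node advertises 192 VCPU, 196078 MB of RAM and 400 GB of disk. A quick check of that arithmetic against the logged values:

    # Values copied from the inventory dict logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print("%s: %g" % (rc, capacity))   # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400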
[ 786.923405] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Traceback (most recent call last): [ 786.923405] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 786.923405] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] self.driver.spawn(context, instance, image_meta, [ 786.923405] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 786.923405] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] self._vmops.spawn(context, instance, image_meta, injected_files, [ 786.923405] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 786.923405] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] vm_ref = self.build_virtual_machine(instance, [ 786.923405] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 786.923405] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] vif_infos = vmwarevif.get_vif_info(self._session, [ 786.923405] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 786.923890] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] for vif in network_info: [ 786.923890] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 786.923890] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] return self._sync_wrapper(fn, *args, **kwargs) [ 786.923890] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 786.923890] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] self.wait() [ 786.923890] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 786.923890] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] self[:] = self._gt.wait() [ 786.923890] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 786.923890] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] return self._exit_event.wait() [ 786.923890] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 786.923890] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] current.throw(*self._exc) [ 786.923890] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
786.923890] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] result = function(*args, **kwargs) [ 786.924411] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 786.924411] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] return func(*args, **kwargs) [ 786.924411] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 786.924411] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] raise e [ 786.924411] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 786.924411] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] nwinfo = self.network_api.allocate_for_instance( [ 786.924411] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 786.924411] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] created_port_ids = self._update_ports_for_instance( [ 786.924411] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 786.924411] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] with excutils.save_and_reraise_exception(): [ 786.924411] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 786.924411] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] self.force_reraise() [ 786.924411] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 786.924906] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] raise self.value [ 786.924906] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 786.924906] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] updated_port = self._update_port( [ 786.924906] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 786.924906] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] _ensure_no_port_binding_failure(port) [ 786.924906] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 786.924906] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] raise exception.PortBindingFailed(port_id=port['id']) [ 786.924906] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] nova.exception.PortBindingFailed: Binding failed for 
port 50771be9-1b74-455d-823d-98060158af48, please check neutron logs for more information. [ 786.924906] env[61629]: ERROR nova.compute.manager [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] [ 786.924906] env[61629]: DEBUG nova.compute.utils [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Binding failed for port 50771be9-1b74-455d-823d-98060158af48, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 786.925351] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 14.529s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.928664] env[61629]: DEBUG nova.compute.manager [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Build of instance fa8a181b-2170-4c38-98d6-adc4e5a80f94 was re-scheduled: Binding failed for port 50771be9-1b74-455d-823d-98060158af48, please check neutron logs for more information. {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 786.928664] env[61629]: DEBUG nova.compute.manager [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 786.928664] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Acquiring lock "refresh_cache-fa8a181b-2170-4c38-98d6-adc4e5a80f94" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.928664] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Acquired lock "refresh_cache-fa8a181b-2170-4c38-98d6-adc4e5a80f94" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.928875] env[61629]: DEBUG nova.network.neutron [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 786.929558] env[61629]: INFO nova.compute.manager [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] [instance: 52816a66-442f-4869-aee3-0cebd6f5e9bf] Took 1.03 seconds to deallocate network for instance. 
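The "Acquiring lock ... by ..." and "Acquired/Releasing lock refresh_cache-..." lines throughout this stretch come from oslo.concurrency's lockutils, used both as the synchronized decorator (the inner wrapper at lockutils.py:402/407/421) and as the lock() context manager (lockutils.py:310/313/331). A minimal sketch of both forms, with hypothetical function bodies, assuming only the public oslo.concurrency API:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        """Hypothetical critical section, serialized per lock name."""
        return "claimed for %s" % instance_uuid

    def refresh_cache(instance_uuid):
        # Same semaphore-style locking, used inline instead of as a decorator.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return "cache rebuilt for %s" % instance_uuid

    print(claim_resources('fa8a181b-2170-4c38-98d6-adc4e5a80f94'))
    print(refresh_cache('fa8a181b-2170-4c38-98d6-adc4e5a80f94'))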
[ 786.931753] env[61629]: DEBUG nova.compute.manager [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 787.445834] env[61629]: DEBUG nova.network.neutron [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 787.452731] env[61629]: DEBUG oslo_concurrency.lockutils [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.516613] env[61629]: DEBUG nova.network.neutron [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.959568] env[61629]: INFO nova.scheduler.client.report [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Deleted allocations for instance 52816a66-442f-4869-aee3-0cebd6f5e9bf [ 788.019587] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Releasing lock "refresh_cache-fa8a181b-2170-4c38-98d6-adc4e5a80f94" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.019835] env[61629]: DEBUG nova.compute.manager [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 788.020029] env[61629]: DEBUG nova.compute.manager [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 788.020206] env[61629]: DEBUG nova.network.neutron [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 788.035509] env[61629]: DEBUG nova.network.neutron [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.454505] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 52816a66-442f-4869-aee3-0cebd6f5e9bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 788.469778] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bee74f51-2fa2-4000-816d-4139c3b02950 tempest-ListImageFiltersTestJSON-958178463 tempest-ListImageFiltersTestJSON-958178463-project-member] Lock "52816a66-442f-4869-aee3-0cebd6f5e9bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 151.449s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.538296] env[61629]: DEBUG nova.network.neutron [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.958881] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance fa8a181b-2170-4c38-98d6-adc4e5a80f94 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 788.959089] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 853f3cd8-c874-45e8-9e89-ee897dea87a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 788.959185] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 3b7866fb-213a-46a7-b31c-4ce5598591c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 788.959306] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance c3724b2e-4f6b-4db5-a68f-41e410e561e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 788.971926] env[61629]: DEBUG nova.compute.manager [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 789.040627] env[61629]: INFO nova.compute.manager [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] [instance: fa8a181b-2170-4c38-98d6-adc4e5a80f94] Took 1.02 seconds to deallocate network for instance. [ 789.462364] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance fe6adbf6-be78-45ee-a136-b7e538fb124b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 789.582967] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 789.965525] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 355aa564-3067-4a3c-92de-4ab6e2b8fa6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 790.074060] env[61629]: INFO nova.scheduler.client.report [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Deleted allocations for instance fa8a181b-2170-4c38-98d6-adc4e5a80f94 [ 790.469754] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 28af8dc5-0817-43e7-bce0-3491971efb0c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 790.586734] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b27f2ba5-fe21-4d17-b059-7ecba4f36c26 tempest-ServerMetadataTestJSON-563360671 tempest-ServerMetadataTestJSON-563360671-project-member] Lock "fa8a181b-2170-4c38-98d6-adc4e5a80f94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 150.725s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.691211] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "9c340ca1-75e0-4d65-8aae-0d5e11ff3e66" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.691443] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "9c340ca1-75e0-4d65-8aae-0d5e11ff3e66" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.926777] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "d37958f8-7607-418b-9cfd-c3a5df721e94" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.927028] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "d37958f8-7607-418b-9cfd-c3a5df721e94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.971626] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance c5c6854c-1fe6-46e7-aee7-6a5e00d6027c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the 
instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.088537] env[61629]: DEBUG nova.compute.manager [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 791.236189] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "da1eb7f9-7562-40c8-955b-c11f831b7bc8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.236431] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "da1eb7f9-7562-40c8-955b-c11f831b7bc8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.474473] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance b8cfaef2-5f78-4026-90b8-fe2adacd61e0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 791.610946] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.979065] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 76f08ac6-bb83-4d61-9707-b602028c54f2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 792.483346] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 3cabd3ef-590a-41f3-a611-3d27b4853db5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 792.987171] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance c1bb3820-0c77-4a7e-bcce-17d5e6793ab9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 793.490052] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance dce0c7e1-1e47-49ad-88f7-f8f5e293d239 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 793.993668] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 794.497471] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance edb4e0f6-57ad-48cf-aa20-3b2549bff3fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.000693] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance c3f830d6-8999-49d5-a431-b09dfdaf8313 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.508902] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 2b01eeae-64be-44b3-b4cf-c2a8490043e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 796.013136] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 68c1e93a-2829-4764-a900-75c3479b4715 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 796.227364] env[61629]: DEBUG oslo_concurrency.lockutils [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "87a1383f-d66b-4bde-b153-89ac62ff8390" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.227595] env[61629]: DEBUG oslo_concurrency.lockutils [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "87a1383f-d66b-4bde-b153-89ac62ff8390" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.519176] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 08cb71f4-2ebe-4694-856c-2e772f319cdf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 796.578887] env[61629]: DEBUG oslo_concurrency.lockutils [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "0d21b352-bdd0-4887-8658-cd5c448352d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.579368] env[61629]: DEBUG oslo_concurrency.lockutils [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "0d21b352-bdd0-4887-8658-cd5c448352d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.021990] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 09890839-b1d9-4558-992d-b1a6f4c5f750 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 797.022287] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=61629) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 797.022442] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=61629) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 797.041060] env[61629]: DEBUG nova.scheduler.client.report [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Refreshing inventories for resource provider d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 797.057226] env[61629]: DEBUG nova.scheduler.client.report [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Updating ProviderTree inventory for provider d075eff1-6f77-44a8-824e-16f3e03b4063 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 797.057411] env[61629]: DEBUG nova.compute.provider_tree [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 797.069255] env[61629]: DEBUG nova.scheduler.client.report [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Refreshing aggregate associations for resource provider d075eff1-6f77-44a8-824e-16f3e03b4063, aggregates: None {{(pid=61629) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 797.088554] env[61629]: DEBUG nova.scheduler.client.report [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Refreshing trait associations for resource provider d075eff1-6f77-44a8-824e-16f3e03b4063, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61629) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 797.375633] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d704b094-8a48-4501-9272-794ed6ca7c89 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.385171] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5bfc2a4e-c561-46ae-a370-57a8c46bd6e4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.414502] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1b8ddc-90fa-4d29-9ae0-40416b920959 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.421376] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5c342b-b6b6-41f7-ae3c-baa9bd329cea {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.433610] env[61629]: DEBUG nova.compute.provider_tree [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.936885] env[61629]: DEBUG nova.scheduler.client.report [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 798.443343] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61629) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 798.443588] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 11.518s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.443886] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 25.433s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.447675] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 798.447832] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Cleaning up deleted instances {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 798.954677] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] There are 5 instances to clean {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 798.954952] env[61629]: DEBUG nova.compute.manager [None 
req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 2315bd37-6151-42d7-8b54-9ee367be0ed1] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 799.210566] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed908f88-d681-437b-9829-a0a3c3b8970f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.218153] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b02903-8459-47d9-b551-2bbb681794d8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.247054] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5152d6c4-4412-474a-8ffc-918dd4c1a425 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.254881] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-613f29d4-0133-4a67-a00f-a39f444d9d24 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.267864] env[61629]: DEBUG nova.compute.provider_tree [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.458067] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: dd406dd1-0e19-400b-a862-ae51fd134017] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 799.771632] env[61629]: DEBUG nova.scheduler.client.report [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 799.961154] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 733343f7-99e2-4e07-94eb-1b66458d799a] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 800.276431] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.832s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.277083] env[61629]: ERROR nova.compute.manager [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f 
tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8e84f788-48f9-4e4e-9537-d543277d505e, please check neutron logs for more information. [ 800.277083] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Traceback (most recent call last): [ 800.277083] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 800.277083] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] self.driver.spawn(context, instance, image_meta, [ 800.277083] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 800.277083] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 800.277083] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 800.277083] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] vm_ref = self.build_virtual_machine(instance, [ 800.277083] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 800.277083] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] vif_infos = vmwarevif.get_vif_info(self._session, [ 800.277083] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 800.277352] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] for vif in network_info: [ 800.277352] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 800.277352] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] return self._sync_wrapper(fn, *args, **kwargs) [ 800.277352] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 800.277352] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] self.wait() [ 800.277352] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 800.277352] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] self[:] = self._gt.wait() [ 800.277352] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 800.277352] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] return self._exit_event.wait() [ 800.277352] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 
800.277352] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] current.throw(*self._exc) [ 800.277352] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 800.277352] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] result = function(*args, **kwargs) [ 800.277740] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 800.277740] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] return func(*args, **kwargs) [ 800.277740] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 800.277740] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] raise e [ 800.277740] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 800.277740] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] nwinfo = self.network_api.allocate_for_instance( [ 800.277740] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 800.277740] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] created_port_ids = self._update_ports_for_instance( [ 800.277740] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 800.277740] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] with excutils.save_and_reraise_exception(): [ 800.277740] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 800.277740] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] self.force_reraise() [ 800.277740] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 800.279485] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] raise self.value [ 800.279485] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 800.279485] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] updated_port = self._update_port( [ 800.279485] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 800.279485] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] _ensure_no_port_binding_failure(port) [ 800.279485] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 800.279485] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] raise exception.PortBindingFailed(port_id=port['id']) [ 800.279485] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] nova.exception.PortBindingFailed: Binding failed for port 8e84f788-48f9-4e4e-9537-d543277d505e, please check neutron logs for more information. [ 800.279485] env[61629]: ERROR nova.compute.manager [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] [ 800.279485] env[61629]: DEBUG nova.compute.utils [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Binding failed for port 8e84f788-48f9-4e4e-9537-d543277d505e, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 800.279743] env[61629]: DEBUG nova.compute.manager [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Build of instance 853f3cd8-c874-45e8-9e89-ee897dea87a3 was re-scheduled: Binding failed for port 8e84f788-48f9-4e4e-9537-d543277d505e, please check neutron logs for more information. {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 800.280018] env[61629]: DEBUG nova.compute.manager [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 800.280255] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Acquiring lock "refresh_cache-853f3cd8-c874-45e8-9e89-ee897dea87a3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 800.280402] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Acquired lock "refresh_cache-853f3cd8-c874-45e8-9e89-ee897dea87a3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.280556] env[61629]: DEBUG nova.network.neutron [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 800.281825] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.078s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.283681] env[61629]: INFO nova.compute.claims [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 
tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 800.465049] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 33029a57-19d2-45eb-b4ec-f50c47d3dc12] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 800.802671] env[61629]: DEBUG nova.network.neutron [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 800.897535] env[61629]: DEBUG nova.network.neutron [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.967827] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 113fe8e6-bc12-41fe-a405-cec2aa1a717e] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 801.401720] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Releasing lock "refresh_cache-853f3cd8-c874-45e8-9e89-ee897dea87a3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.401720] env[61629]: DEBUG nova.compute.manager [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 801.401720] env[61629]: DEBUG nova.compute.manager [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 801.401720] env[61629]: DEBUG nova.network.neutron [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 801.415676] env[61629]: DEBUG nova.network.neutron [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 801.470869] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.471136] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Cleaning up deleted instances with incomplete migration {{(pid=61629) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 801.690670] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc33fac-1123-46ad-8932-27bb9f0fc8a8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.698313] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95fed66e-f22f-418a-936f-21263c3e5513 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.727335] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae6861ef-3486-477b-85d5-b0ea45ab02ee {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.734923] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ac73f8-1066-4d14-862d-ba7dbce92d1b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.748084] env[61629]: DEBUG nova.compute.provider_tree [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 801.918189] env[61629]: DEBUG nova.network.neutron [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.974042] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 802.251174] env[61629]: DEBUG nova.scheduler.client.report [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 802.421260] env[61629]: INFO nova.compute.manager [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] [instance: 853f3cd8-c874-45e8-9e89-ee897dea87a3] Took 1.02 seconds to deallocate network for instance. [ 802.755955] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.474s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.756408] env[61629]: DEBUG nova.compute.manager [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 802.759183] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.728s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.760907] env[61629]: INFO nova.compute.claims [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 803.266145] env[61629]: DEBUG nova.compute.utils [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 803.270057] env[61629]: DEBUG nova.compute.manager [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 803.270057] env[61629]: DEBUG nova.network.neutron [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 803.307134] env[61629]: DEBUG nova.policy [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc7564136c164fdf95295fc332a9d1a7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dcf81576974d4de0a7ee3a353d27df25', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 803.451073] env[61629]: INFO nova.scheduler.client.report [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Deleted allocations for instance 853f3cd8-c874-45e8-9e89-ee897dea87a3 [ 803.665176] env[61629]: DEBUG nova.network.neutron [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Successfully created port: e42b573e-58a2-49f8-ada0-d3ef74259470 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 803.770906] env[61629]: DEBUG nova.compute.manager [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 803.966224] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a1415e4-d99c-4e91-a15a-8f519d9e422f tempest-ServerRescueTestJSON-447289418 tempest-ServerRescueTestJSON-447289418-project-member] Lock "853f3cd8-c874-45e8-9e89-ee897dea87a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 159.233s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.057564] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-346bd32b-6ee6-47c0-9571-3f56a90e13ed {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.065911] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6395cf2a-e3fc-4a43-bbe9-ee6b52e34c6e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.102074] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c3697a-0192-4968-9902-2e83a1b02785 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.110592] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a07be5d-2438-4b0a-9a10-08e834694929 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.123111] env[61629]: DEBUG nova.compute.provider_tree [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.283196] env[61629]: INFO nova.virt.block_device [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Booting with volume 009018f1-9b9b-4340-9f46-544040567d2a at /dev/sda [ 804.326365] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4e03adc1-a0d2-4147-96c0-2c6da02383b8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.337268] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6181c80-d426-4676-8d7b-00cf523a7e6b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.360698] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-390c645e-3c19-4e67-b2af-4348f3d7d257 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.368302] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa857e4-c6e6-4dc9-9e75-69c977122113 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.391403] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4a78bcd4-2f1b-4bbd-8c02-5713293c2b70 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.397770] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be1ea0a4-2948-4406-ae40-1bd3f40e76d3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.409306] env[61629]: DEBUG nova.compute.manager [req-22fb3c9c-9136-4237-9b64-9b6147ebfd09 req-4059002f-8979-41d1-9ec7-ce4bf8ffa53d service nova] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Received event network-changed-e42b573e-58a2-49f8-ada0-d3ef74259470 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 804.409517] env[61629]: DEBUG nova.compute.manager [req-22fb3c9c-9136-4237-9b64-9b6147ebfd09 req-4059002f-8979-41d1-9ec7-ce4bf8ffa53d service nova] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Refreshing instance network info cache due to event network-changed-e42b573e-58a2-49f8-ada0-d3ef74259470. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 804.409748] env[61629]: DEBUG oslo_concurrency.lockutils [req-22fb3c9c-9136-4237-9b64-9b6147ebfd09 req-4059002f-8979-41d1-9ec7-ce4bf8ffa53d service nova] Acquiring lock "refresh_cache-fe6adbf6-be78-45ee-a136-b7e538fb124b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.409890] env[61629]: DEBUG oslo_concurrency.lockutils [req-22fb3c9c-9136-4237-9b64-9b6147ebfd09 req-4059002f-8979-41d1-9ec7-ce4bf8ffa53d service nova] Acquired lock "refresh_cache-fe6adbf6-be78-45ee-a136-b7e538fb124b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.410059] env[61629]: DEBUG nova.network.neutron [req-22fb3c9c-9136-4237-9b64-9b6147ebfd09 req-4059002f-8979-41d1-9ec7-ce4bf8ffa53d service nova] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Refreshing network info cache for port e42b573e-58a2-49f8-ada0-d3ef74259470 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 804.414934] env[61629]: DEBUG nova.virt.block_device [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Updating existing volume attachment record: 39bc14c2-4f7e-49c4-a471-977fe27586b5 {{(pid=61629) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 804.471760] env[61629]: DEBUG nova.compute.manager [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Starting instance... 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 804.629379] env[61629]: DEBUG nova.scheduler.client.report [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 804.734107] env[61629]: ERROR nova.compute.manager [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e42b573e-58a2-49f8-ada0-d3ef74259470, please check neutron logs for more information. [ 804.734107] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 804.734107] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 804.734107] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 804.734107] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 804.734107] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 804.734107] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 804.734107] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 804.734107] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.734107] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 804.734107] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.734107] env[61629]: ERROR nova.compute.manager raise self.value [ 804.734107] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 804.734107] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 804.734107] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 804.734107] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 804.734574] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 804.734574] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 804.734574] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e42b573e-58a2-49f8-ada0-d3ef74259470, please check neutron logs for more information. 
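Both tracebacks around this failure bottom out at nova/network/neutron.py line 294 in _ensure_no_port_binding_failure(), which is what turns a Neutron port whose binding did not complete into the PortBindingFailed raised above. The following is a minimal, self-contained sketch of that check, not Nova's actual code: the 'binding_failed' vif_type value and the dict-shaped port are assumptions for illustration, and the port UUID is simply copied from the log.

    # Hypothetical, simplified version of the check reported at
    # nova/network/neutron.py:294 in the tracebacks above.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed value Neutron reports for a failed binding

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        # After the port is pushed to Neutron, inspect the returned binding
        # result and abort the build instead of spawning a VM with an
        # unusable VIF.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    try:
        _ensure_no_port_binding_failure(
            {'id': 'e42b573e-58a2-49f8-ada0-d3ef74259470',
             'binding:vif_type': VIF_TYPE_BINDING_FAILED})
    except PortBindingFailed as exc:
        print(exc)  # same wording as the ERROR lines above

This is why the build is aborted and the allocation later deleted or the instance re-scheduled, as the surrounding records show.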
[ 804.734574] env[61629]: ERROR nova.compute.manager [ 804.734574] env[61629]: Traceback (most recent call last): [ 804.734574] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 804.734574] env[61629]: listener.cb(fileno) [ 804.734574] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 804.734574] env[61629]: result = function(*args, **kwargs) [ 804.734574] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 804.734574] env[61629]: return func(*args, **kwargs) [ 804.734574] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 804.734574] env[61629]: raise e [ 804.734574] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 804.734574] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 804.734574] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 804.734574] env[61629]: created_port_ids = self._update_ports_for_instance( [ 804.734574] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 804.734574] env[61629]: with excutils.save_and_reraise_exception(): [ 804.734574] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.734574] env[61629]: self.force_reraise() [ 804.734574] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.734574] env[61629]: raise self.value [ 804.734574] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 804.734574] env[61629]: updated_port = self._update_port( [ 804.734574] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 804.734574] env[61629]: _ensure_no_port_binding_failure(port) [ 804.734574] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 804.734574] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 804.735318] env[61629]: nova.exception.PortBindingFailed: Binding failed for port e42b573e-58a2-49f8-ada0-d3ef74259470, please check neutron logs for more information. [ 804.735318] env[61629]: Removing descriptor: 15 [ 804.955040] env[61629]: DEBUG nova.network.neutron [req-22fb3c9c-9136-4237-9b64-9b6147ebfd09 req-4059002f-8979-41d1-9ec7-ce4bf8ffa53d service nova] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 805.004498] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.094289] env[61629]: DEBUG nova.network.neutron [req-22fb3c9c-9136-4237-9b64-9b6147ebfd09 req-4059002f-8979-41d1-9ec7-ce4bf8ffa53d service nova] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.135150] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.375s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.135150] env[61629]: DEBUG nova.compute.manager [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 805.141045] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 25.266s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.597195] env[61629]: DEBUG oslo_concurrency.lockutils [req-22fb3c9c-9136-4237-9b64-9b6147ebfd09 req-4059002f-8979-41d1-9ec7-ce4bf8ffa53d service nova] Releasing lock "refresh_cache-fe6adbf6-be78-45ee-a136-b7e538fb124b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.641908] env[61629]: DEBUG nova.compute.utils [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 805.643997] env[61629]: DEBUG nova.compute.manager [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 805.643997] env[61629]: DEBUG nova.network.neutron [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 805.731049] env[61629]: DEBUG nova.policy [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b498fa56d146448185dbb2cf7524bdcb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd02457d6915547d692c0c539a0539b3a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 805.953244] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98856882-f17c-4ee9-8524-875cdd3901c3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.960900] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3747b66b-d947-4e5e-832d-619c4f75502a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.990889] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cfa70e9-4184-4464-b914-94e17e5996ff {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.998550] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb020ae2-6154-4b92-8fac-f97d963b6c10 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.016511] env[61629]: DEBUG nova.compute.provider_tree [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 806.151306] env[61629]: DEBUG nova.compute.manager [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 806.163420] env[61629]: DEBUG nova.network.neutron [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Successfully created port: 7ca8eecb-5fce-4c3d-9b83-197d53c8f97d {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 806.451242] env[61629]: DEBUG nova.compute.manager [req-abbb3fa0-df4c-4b02-b09f-1c2d4d125620 req-43ea9329-3a3d-45db-a490-d37c6056f529 service nova] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Received event network-vif-deleted-e42b573e-58a2-49f8-ada0-d3ef74259470 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 806.520282] env[61629]: DEBUG nova.compute.manager [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 806.521100] env[61629]: DEBUG nova.virt.hardware [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 806.521437] env[61629]: DEBUG nova.virt.hardware [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 806.521721] env[61629]: DEBUG nova.virt.hardware [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 806.522073] env[61629]: DEBUG nova.virt.hardware [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 806.522347] env[61629]: DEBUG nova.virt.hardware [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 806.522616] env[61629]: DEBUG nova.virt.hardware [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 806.522939] env[61629]: DEBUG nova.virt.hardware [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 806.523224] env[61629]: DEBUG nova.virt.hardware [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 806.523578] env[61629]: DEBUG nova.virt.hardware [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 806.523867] env[61629]: DEBUG nova.virt.hardware [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 806.524197] env[61629]: DEBUG nova.virt.hardware [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 806.525412] env[61629]: DEBUG nova.scheduler.client.report [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 806.531086] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd53967e-447a-4f1f-97fc-ecc6945a3b68 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.542682] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f226e5f6-62c4-4d3f-8f16-4e18fbd3f295 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.558667] env[61629]: ERROR nova.compute.manager [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Instance failed to spawn: 
nova.exception.PortBindingFailed: Binding failed for port e42b573e-58a2-49f8-ada0-d3ef74259470, please check neutron logs for more information. [ 806.558667] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Traceback (most recent call last): [ 806.558667] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 806.558667] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] yield resources [ 806.558667] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 806.558667] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] self.driver.spawn(context, instance, image_meta, [ 806.558667] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 806.558667] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 806.558667] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 806.558667] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] vm_ref = self.build_virtual_machine(instance, [ 806.558667] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 806.559341] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] vif_infos = vmwarevif.get_vif_info(self._session, [ 806.559341] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 806.559341] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] for vif in network_info: [ 806.559341] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 806.559341] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] return self._sync_wrapper(fn, *args, **kwargs) [ 806.559341] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 806.559341] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] self.wait() [ 806.559341] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 806.559341] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] self[:] = self._gt.wait() [ 806.559341] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 806.559341] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] return self._exit_event.wait() [ 806.559341] env[61629]: ERROR nova.compute.manager [instance: 
fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 806.559341] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] current.throw(*self._exc) [ 806.559644] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 806.559644] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] result = function(*args, **kwargs) [ 806.559644] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 806.559644] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] return func(*args, **kwargs) [ 806.559644] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 806.559644] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] raise e [ 806.559644] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 806.559644] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] nwinfo = self.network_api.allocate_for_instance( [ 806.559644] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 806.559644] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] created_port_ids = self._update_ports_for_instance( [ 806.559644] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 806.559644] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] with excutils.save_and_reraise_exception(): [ 806.559644] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 806.560009] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] self.force_reraise() [ 806.560009] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 806.560009] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] raise self.value [ 806.560009] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 806.560009] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] updated_port = self._update_port( [ 806.560009] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 806.560009] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] _ensure_no_port_binding_failure(port) [ 806.560009] env[61629]: ERROR 
nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 806.560009] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] raise exception.PortBindingFailed(port_id=port['id']) [ 806.560009] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] nova.exception.PortBindingFailed: Binding failed for port e42b573e-58a2-49f8-ada0-d3ef74259470, please check neutron logs for more information. [ 806.560009] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] [ 806.560009] env[61629]: INFO nova.compute.manager [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Terminating instance [ 806.564054] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Acquiring lock "refresh_cache-fe6adbf6-be78-45ee-a136-b7e538fb124b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.564154] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Acquired lock "refresh_cache-fe6adbf6-be78-45ee-a136-b7e538fb124b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.564276] env[61629]: DEBUG nova.network.neutron [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 807.038780] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.900s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.038780] env[61629]: ERROR nova.compute.manager [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2e13df7b-e819-486c-a526-1917db0db79a, please check neutron logs for more information. 
[ 807.038780] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Traceback (most recent call last): [ 807.038780] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 807.038780] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] self.driver.spawn(context, instance, image_meta, [ 807.038780] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 807.038780] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 807.038780] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 807.038780] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] vm_ref = self.build_virtual_machine(instance, [ 807.039452] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 807.039452] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] vif_infos = vmwarevif.get_vif_info(self._session, [ 807.039452] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 807.039452] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] for vif in network_info: [ 807.039452] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 807.039452] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] return self._sync_wrapper(fn, *args, **kwargs) [ 807.039452] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 807.039452] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] self.wait() [ 807.039452] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 807.039452] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] self[:] = self._gt.wait() [ 807.039452] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 807.039452] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] return self._exit_event.wait() [ 807.039452] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 807.039944] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] result = hub.switch() [ 807.039944] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
807.039944] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] return self.greenlet.switch() [ 807.039944] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 807.039944] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] result = function(*args, **kwargs) [ 807.039944] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 807.039944] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] return func(*args, **kwargs) [ 807.039944] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 807.039944] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] raise e [ 807.039944] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 807.039944] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] nwinfo = self.network_api.allocate_for_instance( [ 807.039944] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 807.039944] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] created_port_ids = self._update_ports_for_instance( [ 807.040561] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 807.040561] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] with excutils.save_and_reraise_exception(): [ 807.040561] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 807.040561] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] self.force_reraise() [ 807.040561] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 807.040561] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] raise self.value [ 807.040561] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 807.040561] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] updated_port = self._update_port( [ 807.040561] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 807.040561] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] _ensure_no_port_binding_failure(port) [ 807.040561] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 807.040561] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] raise exception.PortBindingFailed(port_id=port['id']) [ 807.041085] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] nova.exception.PortBindingFailed: Binding failed for port 2e13df7b-e819-486c-a526-1917db0db79a, please check neutron logs for more information. [ 807.041085] env[61629]: ERROR nova.compute.manager [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] [ 807.041085] env[61629]: DEBUG nova.compute.utils [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Binding failed for port 2e13df7b-e819-486c-a526-1917db0db79a, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 807.041085] env[61629]: DEBUG nova.compute.manager [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Build of instance 3b7866fb-213a-46a7-b31c-4ce5598591c4 was re-scheduled: Binding failed for port 2e13df7b-e819-486c-a526-1917db0db79a, please check neutron logs for more information. {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 807.041331] env[61629]: DEBUG nova.compute.manager [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 807.041535] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "refresh_cache-3b7866fb-213a-46a7-b31c-4ce5598591c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.041679] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired lock "refresh_cache-3b7866fb-213a-46a7-b31c-4ce5598591c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.041835] env[61629]: DEBUG nova.network.neutron [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 807.046538] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.825s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.048830] env[61629]: INFO nova.compute.claims [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 
28af8dc5-0817-43e7-bce0-3491971efb0c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 807.083561] env[61629]: DEBUG nova.network.neutron [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.149629] env[61629]: ERROR nova.compute.manager [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7ca8eecb-5fce-4c3d-9b83-197d53c8f97d, please check neutron logs for more information. [ 807.149629] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 807.149629] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 807.149629] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 807.149629] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 807.149629] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 807.149629] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 807.149629] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 807.149629] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 807.149629] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 807.149629] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 807.149629] env[61629]: ERROR nova.compute.manager raise self.value [ 807.149629] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 807.149629] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 807.149629] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 807.149629] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 807.150098] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 807.150098] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 807.150098] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7ca8eecb-5fce-4c3d-9b83-197d53c8f97d, please check neutron logs for more information. 
[ 807.150098] env[61629]: ERROR nova.compute.manager [ 807.150098] env[61629]: Traceback (most recent call last): [ 807.150098] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 807.150098] env[61629]: listener.cb(fileno) [ 807.150098] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 807.150098] env[61629]: result = function(*args, **kwargs) [ 807.150098] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 807.150098] env[61629]: return func(*args, **kwargs) [ 807.150098] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 807.150098] env[61629]: raise e [ 807.150098] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 807.150098] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 807.150098] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 807.150098] env[61629]: created_port_ids = self._update_ports_for_instance( [ 807.150098] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 807.150098] env[61629]: with excutils.save_and_reraise_exception(): [ 807.150098] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 807.150098] env[61629]: self.force_reraise() [ 807.150098] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 807.150098] env[61629]: raise self.value [ 807.150098] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 807.150098] env[61629]: updated_port = self._update_port( [ 807.150098] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 807.150098] env[61629]: _ensure_no_port_binding_failure(port) [ 807.150098] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 807.150098] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 807.150949] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 7ca8eecb-5fce-4c3d-9b83-197d53c8f97d, please check neutron logs for more information. [ 807.150949] env[61629]: Removing descriptor: 15 [ 807.165089] env[61629]: DEBUG nova.compute.manager [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 807.178942] env[61629]: DEBUG nova.network.neutron [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.188341] env[61629]: DEBUG nova.virt.hardware [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 807.188576] env[61629]: DEBUG nova.virt.hardware [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 807.188732] env[61629]: DEBUG nova.virt.hardware [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 807.188914] env[61629]: DEBUG nova.virt.hardware [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 807.189071] env[61629]: DEBUG nova.virt.hardware [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 807.189220] env[61629]: DEBUG nova.virt.hardware [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 807.189426] env[61629]: DEBUG nova.virt.hardware [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 807.189580] env[61629]: DEBUG nova.virt.hardware [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 807.189742] env[61629]: DEBUG nova.virt.hardware [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 807.189899] env[61629]: DEBUG nova.virt.hardware [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 807.190085] env[61629]: DEBUG nova.virt.hardware [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 807.190955] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64944ef5-1f60-4bdc-a932-dc7e98655850 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.199579] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba0dce6-133c-49d0-9b55-9275402cf6ea {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.213804] env[61629]: ERROR nova.compute.manager [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7ca8eecb-5fce-4c3d-9b83-197d53c8f97d, please check neutron logs for more information. 
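The spawn-time tracebacks in this section show the same PortBindingFailed surfacing a second time: network allocation runs in a background greenthread, and the async network_info wrapper only re-raises the stored exception when the virt driver first iterates it (the model.py _sync_wrapper/wait frames above). A rough, self-contained sketch of that deferred-failure pattern, using eventlet as the log does; this is an illustration of the shape, not the actual nova.network.model code:

    import eventlet

    class NetworkInfoAsync:
        # Rough stand-in for the async network_info wrapper seen in the traceback.
        def __init__(self, fn, *args, **kwargs):
            # Allocation starts immediately in a background greenthread.
            self._gt = eventlet.spawn(fn, *args, **kwargs)

        def wait(self):
            # GreenThread.wait() returns the result or re-raises the exception
            # raised inside the greenthread; this is the point where the
            # failure from the background allocation reappears during spawn.
            return self._gt.wait()

        def __iter__(self):
            return iter(self.wait())

    def allocate():
        # Stand-in for network_api.allocate_for_instance() hitting a bad binding.
        raise RuntimeError("Binding failed for port ...")

    network_info = NetworkInfoAsync(allocate)
    # Later, inside the virt driver's spawn(), iterating network_info replays
    # the stored failure, producing the second traceback seen in this log:
    # for vif in network_info: ...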
[ 807.213804] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Traceback (most recent call last): [ 807.213804] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 807.213804] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] yield resources [ 807.213804] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 807.213804] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] self.driver.spawn(context, instance, image_meta, [ 807.213804] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 807.213804] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 807.213804] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 807.213804] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] vm_ref = self.build_virtual_machine(instance, [ 807.213804] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 807.214260] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] vif_infos = vmwarevif.get_vif_info(self._session, [ 807.214260] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 807.214260] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] for vif in network_info: [ 807.214260] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 807.214260] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] return self._sync_wrapper(fn, *args, **kwargs) [ 807.214260] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 807.214260] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] self.wait() [ 807.214260] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 807.214260] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] self[:] = self._gt.wait() [ 807.214260] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 807.214260] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] return self._exit_event.wait() [ 807.214260] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 807.214260] env[61629]: ERROR 
nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] current.throw(*self._exc) [ 807.214641] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 807.214641] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] result = function(*args, **kwargs) [ 807.214641] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 807.214641] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] return func(*args, **kwargs) [ 807.214641] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 807.214641] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] raise e [ 807.214641] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 807.214641] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] nwinfo = self.network_api.allocate_for_instance( [ 807.214641] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 807.214641] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] created_port_ids = self._update_ports_for_instance( [ 807.214641] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 807.214641] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] with excutils.save_and_reraise_exception(): [ 807.214641] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 807.214975] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] self.force_reraise() [ 807.214975] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 807.214975] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] raise self.value [ 807.214975] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 807.214975] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] updated_port = self._update_port( [ 807.214975] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 807.214975] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] _ensure_no_port_binding_failure(port) [ 807.214975] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
807.214975] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] raise exception.PortBindingFailed(port_id=port['id']) [ 807.214975] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] nova.exception.PortBindingFailed: Binding failed for port 7ca8eecb-5fce-4c3d-9b83-197d53c8f97d, please check neutron logs for more information. [ 807.214975] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] [ 807.214975] env[61629]: INFO nova.compute.manager [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Terminating instance [ 807.216098] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Acquiring lock "refresh_cache-355aa564-3067-4a3c-92de-4ab6e2b8fa6b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.216259] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Acquired lock "refresh_cache-355aa564-3067-4a3c-92de-4ab6e2b8fa6b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.216423] env[61629]: DEBUG nova.network.neutron [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 807.561055] env[61629]: DEBUG nova.network.neutron [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.627323] env[61629]: DEBUG nova.network.neutron [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.681077] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Releasing lock "refresh_cache-fe6adbf6-be78-45ee-a136-b7e538fb124b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.681668] env[61629]: DEBUG nova.compute.manager [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 807.681984] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f90784ee-5982-4bcc-b305-cbbd1731c3b1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.691127] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e749860-69c7-49ba-9c6c-d01798e5103f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.713030] env[61629]: WARNING nova.virt.vmwareapi.driver [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance fe6adbf6-be78-45ee-a136-b7e538fb124b could not be found. [ 807.713030] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 807.713319] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ae6eb4af-1e13-43d4-8830-0d5ad7c082f2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.722836] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc838044-62a3-43cb-b3a8-03bb24a8373f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.745207] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fe6adbf6-be78-45ee-a136-b7e538fb124b could not be found. [ 807.745399] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 807.745600] env[61629]: INFO nova.compute.manager [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Took 0.06 seconds to destroy the instance on the hypervisor. [ 807.746988] env[61629]: DEBUG oslo.service.loopingcall [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 807.746988] env[61629]: DEBUG nova.compute.manager [-] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 807.746988] env[61629]: DEBUG nova.network.neutron [-] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 807.748270] env[61629]: DEBUG nova.network.neutron [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.768036] env[61629]: DEBUG nova.network.neutron [-] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.845541] env[61629]: DEBUG nova.network.neutron [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.129332] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Releasing lock "refresh_cache-3b7866fb-213a-46a7-b31c-4ce5598591c4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.129578] env[61629]: DEBUG nova.compute.manager [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 808.129752] env[61629]: DEBUG nova.compute.manager [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 808.129920] env[61629]: DEBUG nova.network.neutron [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 808.147605] env[61629]: DEBUG nova.network.neutron [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 808.270283] env[61629]: DEBUG nova.network.neutron [-] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.322602] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df16d4a-2a7d-4df5-9ba2-717544a432e8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.331320] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b202f58-eb58-43d3-9d85-967108a41245 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.360105] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Releasing lock "refresh_cache-355aa564-3067-4a3c-92de-4ab6e2b8fa6b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.360724] env[61629]: DEBUG nova.compute.manager [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 808.361058] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 808.361797] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-32e8ce54-98b7-428e-816d-0b3cf4536c93 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.363999] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5d9af6-f1c0-4345-b004-890e4abab81a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.372034] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ab4dd8-7c36-4418-bbf8-606f21f5f145 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.377974] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaf57865-a81b-401f-8706-c9de2fe0c7d4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.397794] env[61629]: DEBUG nova.compute.provider_tree [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 808.402427] env[61629]: WARNING nova.virt.vmwareapi.vmops [None 
req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 355aa564-3067-4a3c-92de-4ab6e2b8fa6b could not be found. [ 808.402509] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 808.402684] env[61629]: INFO nova.compute.manager [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 808.402916] env[61629]: DEBUG oslo.service.loopingcall [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 808.403345] env[61629]: DEBUG nova.compute.manager [-] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 808.403449] env[61629]: DEBUG nova.network.neutron [-] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 808.419094] env[61629]: DEBUG nova.network.neutron [-] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 808.475995] env[61629]: DEBUG nova.compute.manager [req-f99699a8-1b01-4c03-a060-7762076a910a req-a82d6bd3-cbdd-4c66-8565-209bcb00802d service nova] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Received event network-changed-7ca8eecb-5fce-4c3d-9b83-197d53c8f97d {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 808.476245] env[61629]: DEBUG nova.compute.manager [req-f99699a8-1b01-4c03-a060-7762076a910a req-a82d6bd3-cbdd-4c66-8565-209bcb00802d service nova] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Refreshing instance network info cache due to event network-changed-7ca8eecb-5fce-4c3d-9b83-197d53c8f97d. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 808.476497] env[61629]: DEBUG oslo_concurrency.lockutils [req-f99699a8-1b01-4c03-a060-7762076a910a req-a82d6bd3-cbdd-4c66-8565-209bcb00802d service nova] Acquiring lock "refresh_cache-355aa564-3067-4a3c-92de-4ab6e2b8fa6b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.476573] env[61629]: DEBUG oslo_concurrency.lockutils [req-f99699a8-1b01-4c03-a060-7762076a910a req-a82d6bd3-cbdd-4c66-8565-209bcb00802d service nova] Acquired lock "refresh_cache-355aa564-3067-4a3c-92de-4ab6e2b8fa6b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.476735] env[61629]: DEBUG nova.network.neutron [req-f99699a8-1b01-4c03-a060-7762076a910a req-a82d6bd3-cbdd-4c66-8565-209bcb00802d service nova] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Refreshing network info cache for port 7ca8eecb-5fce-4c3d-9b83-197d53c8f97d {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 808.649826] env[61629]: DEBUG nova.network.neutron [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.772906] env[61629]: INFO nova.compute.manager [-] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Took 1.03 seconds to deallocate network for instance. [ 808.904805] env[61629]: DEBUG nova.scheduler.client.report [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 808.921478] env[61629]: DEBUG nova.network.neutron [-] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.993580] env[61629]: DEBUG nova.network.neutron [req-f99699a8-1b01-4c03-a060-7762076a910a req-a82d6bd3-cbdd-4c66-8565-209bcb00802d service nova] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 809.069645] env[61629]: DEBUG nova.network.neutron [req-f99699a8-1b01-4c03-a060-7762076a910a req-a82d6bd3-cbdd-4c66-8565-209bcb00802d service nova] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.151991] env[61629]: INFO nova.compute.manager [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 3b7866fb-213a-46a7-b31c-4ce5598591c4] Took 1.02 seconds to deallocate network for instance. [ 809.325384] env[61629]: INFO nova.compute.manager [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Took 0.55 seconds to detach 1 volumes for instance. [ 809.327671] env[61629]: DEBUG nova.compute.claims [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 809.327847] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.410745] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.363s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.410745] env[61629]: DEBUG nova.compute.manager [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 809.413400] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.388s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.414797] env[61629]: INFO nova.compute.claims [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 809.423916] env[61629]: INFO nova.compute.manager [-] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Took 1.02 seconds to deallocate network for instance. 
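The inventory that nova.scheduler.client.report logs above for provider d075eff1-6f77-44a8-824e-16f3e03b4063 maps directly onto schedulable capacity. As a rough illustration (not Nova's code; the function name and printed numbers below are derived only from the dict shown in the log entries), Placement-style capacity per resource class is (total - reserved) * allocation_ratio, while max_unit caps what a single instance may claim:

    # Illustrative sketch only -- not Nova's implementation. The inventory dict
    # is copied from the scheduler report entries logged above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        # Usable capacity per resource class: (total - reserved) * allocation_ratio.
        # max_unit separately limits what one allocation (one instance) may request.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(effective_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}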
[ 809.425796] env[61629]: DEBUG nova.compute.claims [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 809.425962] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.572979] env[61629]: DEBUG oslo_concurrency.lockutils [req-f99699a8-1b01-4c03-a060-7762076a910a req-a82d6bd3-cbdd-4c66-8565-209bcb00802d service nova] Releasing lock "refresh_cache-355aa564-3067-4a3c-92de-4ab6e2b8fa6b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.572979] env[61629]: DEBUG nova.compute.manager [req-f99699a8-1b01-4c03-a060-7762076a910a req-a82d6bd3-cbdd-4c66-8565-209bcb00802d service nova] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Received event network-vif-deleted-7ca8eecb-5fce-4c3d-9b83-197d53c8f97d {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 809.919434] env[61629]: DEBUG nova.compute.utils [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 809.922446] env[61629]: DEBUG nova.compute.manager [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 809.922823] env[61629]: DEBUG nova.network.neutron [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 809.966609] env[61629]: DEBUG nova.policy [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '046caa939b1a44589ed11647bc1076c1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'af7c60ee4d504ac6bcec79744610ec9b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 810.178441] env[61629]: INFO nova.scheduler.client.report [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Deleted allocations for instance 3b7866fb-213a-46a7-b31c-4ce5598591c4 [ 810.261020] env[61629]: DEBUG nova.network.neutron [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Successfully created port: 120c8916-737e-4203-9588-a312be54933c {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 810.423897] env[61629]: DEBUG nova.compute.manager [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 810.689582] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c9695ca0-de24-469d-9c46-0c0cad32101d tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "3b7866fb-213a-46a7-b31c-4ce5598591c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 160.465s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.726735] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8fc687-76e5-43f3-b465-4db96a3251c3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.734530] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5b50eb-d50b-4a75-b4c3-441a3d5957f7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.768745] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4cccbb0-82c9-42cb-8a2e-5abec5f65b2c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.778992] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e884501b-421d-4924-9c71-af13039d2d45 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.792123] env[61629]: DEBUG nova.compute.provider_tree [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 811.117474] env[61629]: DEBUG nova.compute.manager [req-cc6815d7-a613-402a-b20e-862c93124fed req-3d7803dc-0bfa-4b9f-9883-50169e3e1a7a service nova] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Received event network-changed-120c8916-737e-4203-9588-a312be54933c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 811.117562] env[61629]: DEBUG nova.compute.manager [req-cc6815d7-a613-402a-b20e-862c93124fed req-3d7803dc-0bfa-4b9f-9883-50169e3e1a7a service nova] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Refreshing instance network info cache due to event network-changed-120c8916-737e-4203-9588-a312be54933c. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 811.117838] env[61629]: DEBUG oslo_concurrency.lockutils [req-cc6815d7-a613-402a-b20e-862c93124fed req-3d7803dc-0bfa-4b9f-9883-50169e3e1a7a service nova] Acquiring lock "refresh_cache-28af8dc5-0817-43e7-bce0-3491971efb0c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.117893] env[61629]: DEBUG oslo_concurrency.lockutils [req-cc6815d7-a613-402a-b20e-862c93124fed req-3d7803dc-0bfa-4b9f-9883-50169e3e1a7a service nova] Acquired lock "refresh_cache-28af8dc5-0817-43e7-bce0-3491971efb0c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.118052] env[61629]: DEBUG nova.network.neutron [req-cc6815d7-a613-402a-b20e-862c93124fed req-3d7803dc-0bfa-4b9f-9883-50169e3e1a7a service nova] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Refreshing network info cache for port 120c8916-737e-4203-9588-a312be54933c {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 811.192749] env[61629]: DEBUG nova.compute.manager [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 811.287771] env[61629]: ERROR nova.compute.manager [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 120c8916-737e-4203-9588-a312be54933c, please check neutron logs for more information. 
[ 811.287771] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 811.287771] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 811.287771] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 811.287771] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 811.287771] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 811.287771] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 811.287771] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 811.287771] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 811.287771] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 811.287771] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 811.287771] env[61629]: ERROR nova.compute.manager raise self.value [ 811.287771] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 811.287771] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 811.287771] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 811.287771] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 811.288154] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 811.288154] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 811.288154] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 120c8916-737e-4203-9588-a312be54933c, please check neutron logs for more information. 
[ 811.288154] env[61629]: ERROR nova.compute.manager [ 811.288154] env[61629]: Traceback (most recent call last): [ 811.288154] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 811.288154] env[61629]: listener.cb(fileno) [ 811.288154] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 811.288154] env[61629]: result = function(*args, **kwargs) [ 811.288154] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 811.288154] env[61629]: return func(*args, **kwargs) [ 811.288154] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 811.288154] env[61629]: raise e [ 811.288154] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 811.288154] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 811.288154] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 811.288154] env[61629]: created_port_ids = self._update_ports_for_instance( [ 811.288154] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 811.288154] env[61629]: with excutils.save_and_reraise_exception(): [ 811.288154] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 811.288154] env[61629]: self.force_reraise() [ 811.288154] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 811.288154] env[61629]: raise self.value [ 811.288154] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 811.288154] env[61629]: updated_port = self._update_port( [ 811.288154] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 811.288154] env[61629]: _ensure_no_port_binding_failure(port) [ 811.288154] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 811.288154] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 811.288912] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 120c8916-737e-4203-9588-a312be54933c, please check neutron logs for more information. [ 811.288912] env[61629]: Removing descriptor: 21 [ 811.295428] env[61629]: DEBUG nova.scheduler.client.report [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 811.437944] env[61629]: DEBUG nova.compute.manager [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 811.458012] env[61629]: DEBUG nova.virt.hardware [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 811.458274] env[61629]: DEBUG nova.virt.hardware [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 811.458430] env[61629]: DEBUG nova.virt.hardware [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 811.458607] env[61629]: DEBUG nova.virt.hardware [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 811.458749] env[61629]: DEBUG nova.virt.hardware [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 811.458888] env[61629]: DEBUG nova.virt.hardware [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 811.459107] env[61629]: DEBUG nova.virt.hardware [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 811.459267] env[61629]: DEBUG nova.virt.hardware [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 811.459428] env[61629]: DEBUG nova.virt.hardware [None 
req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 811.459591] env[61629]: DEBUG nova.virt.hardware [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 811.459758] env[61629]: DEBUG nova.virt.hardware [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 811.460639] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d283ac-3dc3-40b8-b13c-d949a950bf49 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.470108] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f582805c-7f51-4323-bdfe-a63039d282f6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.484191] env[61629]: ERROR nova.compute.manager [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 120c8916-737e-4203-9588-a312be54933c, please check neutron logs for more information. 
[ 811.484191] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Traceback (most recent call last): [ 811.484191] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 811.484191] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] yield resources [ 811.484191] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 811.484191] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] self.driver.spawn(context, instance, image_meta, [ 811.484191] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 811.484191] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 811.484191] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 811.484191] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] vm_ref = self.build_virtual_machine(instance, [ 811.484191] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 811.485239] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] vif_infos = vmwarevif.get_vif_info(self._session, [ 811.485239] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 811.485239] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] for vif in network_info: [ 811.485239] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 811.485239] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] return self._sync_wrapper(fn, *args, **kwargs) [ 811.485239] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 811.485239] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] self.wait() [ 811.485239] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 811.485239] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] self[:] = self._gt.wait() [ 811.485239] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 811.485239] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] return self._exit_event.wait() [ 811.485239] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 811.485239] env[61629]: ERROR 
nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] current.throw(*self._exc) [ 811.485573] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 811.485573] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] result = function(*args, **kwargs) [ 811.485573] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 811.485573] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] return func(*args, **kwargs) [ 811.485573] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 811.485573] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] raise e [ 811.485573] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 811.485573] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] nwinfo = self.network_api.allocate_for_instance( [ 811.485573] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 811.485573] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] created_port_ids = self._update_ports_for_instance( [ 811.485573] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 811.485573] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] with excutils.save_and_reraise_exception(): [ 811.485573] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 811.485911] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] self.force_reraise() [ 811.485911] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 811.485911] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] raise self.value [ 811.485911] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 811.485911] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] updated_port = self._update_port( [ 811.485911] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 811.485911] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] _ensure_no_port_binding_failure(port) [ 811.485911] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
811.485911] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] raise exception.PortBindingFailed(port_id=port['id']) [ 811.485911] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] nova.exception.PortBindingFailed: Binding failed for port 120c8916-737e-4203-9588-a312be54933c, please check neutron logs for more information. [ 811.485911] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] [ 811.485911] env[61629]: INFO nova.compute.manager [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Terminating instance [ 811.487396] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Acquiring lock "refresh_cache-28af8dc5-0817-43e7-bce0-3491971efb0c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.644332] env[61629]: DEBUG nova.network.neutron [req-cc6815d7-a613-402a-b20e-862c93124fed req-3d7803dc-0bfa-4b9f-9883-50169e3e1a7a service nova] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 811.711443] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.726650] env[61629]: DEBUG nova.network.neutron [req-cc6815d7-a613-402a-b20e-862c93124fed req-3d7803dc-0bfa-4b9f-9883-50169e3e1a7a service nova] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.800080] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.387s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.800591] env[61629]: DEBUG nova.compute.manager [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 811.803637] env[61629]: DEBUG oslo_concurrency.lockutils [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 25.397s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.978204] env[61629]: DEBUG oslo_concurrency.lockutils [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "55f2d2fc-9404-422f-ba08-72e6e11a089f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.978440] env[61629]: DEBUG oslo_concurrency.lockutils [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "55f2d2fc-9404-422f-ba08-72e6e11a089f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.229073] env[61629]: DEBUG oslo_concurrency.lockutils [req-cc6815d7-a613-402a-b20e-862c93124fed req-3d7803dc-0bfa-4b9f-9883-50169e3e1a7a service nova] Releasing lock "refresh_cache-28af8dc5-0817-43e7-bce0-3491971efb0c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.229475] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Acquired lock "refresh_cache-28af8dc5-0817-43e7-bce0-3491971efb0c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.229668] env[61629]: DEBUG nova.network.neutron [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 812.307958] env[61629]: DEBUG nova.compute.utils [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 812.313454] env[61629]: DEBUG nova.compute.manager [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 812.313627] env[61629]: DEBUG nova.network.neutron [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 812.364405] env[61629]: DEBUG nova.policy [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3cbfdc70fad64e8ab37fb9e0c1a10e0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bc538b7901b4d65a6107db047063183', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 812.565581] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28855c3e-fe46-4791-8e33-facc51faa7bf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.573388] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0571fba-7fd4-4936-8c6e-a995e3c5e2b3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.603043] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0848d540-f728-4b1f-b2d3-c5c008c4e004 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.610253] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a95095f-1427-426d-bb37-4a5e990d664e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.623560] env[61629]: DEBUG nova.compute.provider_tree [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.668069] env[61629]: DEBUG nova.network.neutron [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Successfully created port: bc671ae6-2cb4-46b5-8289-516f2007bc6b {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 812.755406] env[61629]: DEBUG nova.network.neutron [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 812.814122] env[61629]: DEBUG nova.compute.manager [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 812.826542] env[61629]: DEBUG nova.network.neutron [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.127075] env[61629]: DEBUG nova.scheduler.client.report [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 813.139787] env[61629]: DEBUG nova.compute.manager [req-802956a1-38fc-4b39-8a6b-4ff91fef3860 req-5c52a3ee-572a-487a-8a54-916e79d44925 service nova] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Received event network-vif-deleted-120c8916-737e-4203-9588-a312be54933c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 813.329208] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Releasing lock "refresh_cache-28af8dc5-0817-43e7-bce0-3491971efb0c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.329599] env[61629]: DEBUG nova.compute.manager [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 813.329793] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 813.330088] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-337f9574-04f7-4385-9539-febd82275c85 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.339079] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa4b5dc-00d7-42ca-8975-a28fa4ffaeb3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.360129] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 28af8dc5-0817-43e7-bce0-3491971efb0c could not be found. [ 813.360350] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 813.360532] env[61629]: INFO nova.compute.manager [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Took 0.03 seconds to destroy the instance on the hypervisor. [ 813.360775] env[61629]: DEBUG oslo.service.loopingcall [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 813.360991] env[61629]: DEBUG nova.compute.manager [-] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 813.361095] env[61629]: DEBUG nova.network.neutron [-] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 813.376589] env[61629]: DEBUG nova.network.neutron [-] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 813.581141] env[61629]: ERROR nova.compute.manager [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bc671ae6-2cb4-46b5-8289-516f2007bc6b, please check neutron logs for more information. 
[ 813.581141] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 813.581141] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 813.581141] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 813.581141] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 813.581141] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 813.581141] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 813.581141] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 813.581141] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 813.581141] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 813.581141] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 813.581141] env[61629]: ERROR nova.compute.manager raise self.value [ 813.581141] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 813.581141] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 813.581141] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 813.581141] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 813.582431] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 813.582431] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 813.582431] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bc671ae6-2cb4-46b5-8289-516f2007bc6b, please check neutron logs for more information. 
[ 813.582431] env[61629]: ERROR nova.compute.manager [ 813.582431] env[61629]: Traceback (most recent call last): [ 813.582431] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 813.582431] env[61629]: listener.cb(fileno) [ 813.582431] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 813.582431] env[61629]: result = function(*args, **kwargs) [ 813.582431] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 813.582431] env[61629]: return func(*args, **kwargs) [ 813.582431] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 813.582431] env[61629]: raise e [ 813.582431] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 813.582431] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 813.582431] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 813.582431] env[61629]: created_port_ids = self._update_ports_for_instance( [ 813.582431] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 813.582431] env[61629]: with excutils.save_and_reraise_exception(): [ 813.582431] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 813.582431] env[61629]: self.force_reraise() [ 813.582431] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 813.582431] env[61629]: raise self.value [ 813.582431] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 813.582431] env[61629]: updated_port = self._update_port( [ 813.582431] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 813.582431] env[61629]: _ensure_no_port_binding_failure(port) [ 813.582431] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 813.582431] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 813.583398] env[61629]: nova.exception.PortBindingFailed: Binding failed for port bc671ae6-2cb4-46b5-8289-516f2007bc6b, please check neutron logs for more information. [ 813.583398] env[61629]: Removing descriptor: 21 [ 813.632450] env[61629]: DEBUG oslo_concurrency.lockutils [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.829s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.633177] env[61629]: ERROR nova.compute.manager [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ed92b04d-8f51-4520-ac29-2149f7a8fdab, please check neutron logs for more information. 
[ 813.633177] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Traceback (most recent call last): [ 813.633177] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 813.633177] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] self.driver.spawn(context, instance, image_meta, [ 813.633177] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 813.633177] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 813.633177] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 813.633177] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] vm_ref = self.build_virtual_machine(instance, [ 813.633177] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 813.633177] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] vif_infos = vmwarevif.get_vif_info(self._session, [ 813.633177] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 813.633454] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] for vif in network_info: [ 813.633454] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 813.633454] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] return self._sync_wrapper(fn, *args, **kwargs) [ 813.633454] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 813.633454] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] self.wait() [ 813.633454] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 813.633454] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] self[:] = self._gt.wait() [ 813.633454] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 813.633454] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] return self._exit_event.wait() [ 813.633454] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 813.633454] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] result = hub.switch() [ 813.633454] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
813.633454] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] return self.greenlet.switch() [ 813.633787] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 813.633787] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] result = function(*args, **kwargs) [ 813.633787] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 813.633787] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] return func(*args, **kwargs) [ 813.633787] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 813.633787] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] raise e [ 813.633787] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 813.633787] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] nwinfo = self.network_api.allocate_for_instance( [ 813.633787] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 813.633787] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] created_port_ids = self._update_ports_for_instance( [ 813.633787] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 813.633787] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] with excutils.save_and_reraise_exception(): [ 813.633787] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 813.634188] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] self.force_reraise() [ 813.634188] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 813.634188] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] raise self.value [ 813.634188] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 813.634188] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] updated_port = self._update_port( [ 813.634188] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 813.634188] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] _ensure_no_port_binding_failure(port) [ 813.634188] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 813.634188] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] raise exception.PortBindingFailed(port_id=port['id']) [ 813.634188] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] nova.exception.PortBindingFailed: Binding failed for port ed92b04d-8f51-4520-ac29-2149f7a8fdab, please check neutron logs for more information. [ 813.634188] env[61629]: ERROR nova.compute.manager [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] [ 813.634489] env[61629]: DEBUG nova.compute.utils [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Binding failed for port ed92b04d-8f51-4520-ac29-2149f7a8fdab, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 813.635249] env[61629]: DEBUG oslo_concurrency.lockutils [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.182s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.636691] env[61629]: INFO nova.compute.claims [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 813.639418] env[61629]: DEBUG nova.compute.manager [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Build of instance c3724b2e-4f6b-4db5-a68f-41e410e561e9 was re-scheduled: Binding failed for port ed92b04d-8f51-4520-ac29-2149f7a8fdab, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 813.639777] env[61629]: DEBUG nova.compute.manager [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 813.640011] env[61629]: DEBUG oslo_concurrency.lockutils [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Acquiring lock "refresh_cache-c3724b2e-4f6b-4db5-a68f-41e410e561e9" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.640165] env[61629]: DEBUG oslo_concurrency.lockutils [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Acquired lock "refresh_cache-c3724b2e-4f6b-4db5-a68f-41e410e561e9" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.640328] env[61629]: DEBUG nova.network.neutron [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 813.823392] env[61629]: DEBUG nova.compute.manager [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 813.857238] env[61629]: DEBUG nova.virt.hardware [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 813.857486] env[61629]: DEBUG nova.virt.hardware [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 813.857644] env[61629]: DEBUG nova.virt.hardware [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 813.857818] env[61629]: DEBUG nova.virt.hardware [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 813.857963] env[61629]: DEBUG nova.virt.hardware [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 813.858124] env[61629]: DEBUG nova.virt.hardware [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 813.858328] env[61629]: DEBUG nova.virt.hardware [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 813.858482] env[61629]: DEBUG nova.virt.hardware [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 813.858645] env[61629]: DEBUG nova.virt.hardware [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Got 1 possible 
topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 813.858801] env[61629]: DEBUG nova.virt.hardware [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 813.858967] env[61629]: DEBUG nova.virt.hardware [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 813.859826] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0c6bab-f504-4544-9cab-5f1424162043 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.867834] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-879f5320-3569-4e0f-bb92-5579ca5e9031 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.880541] env[61629]: DEBUG nova.network.neutron [-] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.882231] env[61629]: ERROR nova.compute.manager [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bc671ae6-2cb4-46b5-8289-516f2007bc6b, please check neutron logs for more information. 
[ 813.882231] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Traceback (most recent call last): [ 813.882231] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 813.882231] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] yield resources [ 813.882231] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 813.882231] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] self.driver.spawn(context, instance, image_meta, [ 813.882231] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 813.882231] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 813.882231] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 813.882231] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] vm_ref = self.build_virtual_machine(instance, [ 813.882231] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 813.882547] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] vif_infos = vmwarevif.get_vif_info(self._session, [ 813.882547] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 813.882547] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] for vif in network_info: [ 813.882547] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 813.882547] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] return self._sync_wrapper(fn, *args, **kwargs) [ 813.882547] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 813.882547] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] self.wait() [ 813.882547] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 813.882547] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] self[:] = self._gt.wait() [ 813.882547] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 813.882547] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] return self._exit_event.wait() [ 813.882547] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 813.882547] env[61629]: ERROR 
nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] current.throw(*self._exc) [ 813.882862] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 813.882862] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] result = function(*args, **kwargs) [ 813.882862] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 813.882862] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] return func(*args, **kwargs) [ 813.882862] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 813.882862] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] raise e [ 813.882862] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 813.882862] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] nwinfo = self.network_api.allocate_for_instance( [ 813.882862] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 813.882862] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] created_port_ids = self._update_ports_for_instance( [ 813.882862] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 813.882862] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] with excutils.save_and_reraise_exception(): [ 813.882862] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 813.883202] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] self.force_reraise() [ 813.883202] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 813.883202] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] raise self.value [ 813.883202] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 813.883202] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] updated_port = self._update_port( [ 813.883202] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 813.883202] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] _ensure_no_port_binding_failure(port) [ 813.883202] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
813.883202] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] raise exception.PortBindingFailed(port_id=port['id']) [ 813.883202] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] nova.exception.PortBindingFailed: Binding failed for port bc671ae6-2cb4-46b5-8289-516f2007bc6b, please check neutron logs for more information. [ 813.883202] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] [ 813.883202] env[61629]: INFO nova.compute.manager [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Terminating instance [ 813.885752] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "refresh_cache-c5c6854c-1fe6-46e7-aee7-6a5e00d6027c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.885910] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquired lock "refresh_cache-c5c6854c-1fe6-46e7-aee7-6a5e00d6027c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.886083] env[61629]: DEBUG nova.network.neutron [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 814.161817] env[61629]: DEBUG nova.network.neutron [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 814.259079] env[61629]: DEBUG nova.network.neutron [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.384954] env[61629]: INFO nova.compute.manager [-] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Took 1.02 seconds to deallocate network for instance. 
[ 814.387161] env[61629]: DEBUG nova.compute.claims [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 814.387338] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.402473] env[61629]: DEBUG nova.network.neutron [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 814.468571] env[61629]: DEBUG nova.network.neutron [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.762025] env[61629]: DEBUG oslo_concurrency.lockutils [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Releasing lock "refresh_cache-c3724b2e-4f6b-4db5-a68f-41e410e561e9" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.762150] env[61629]: DEBUG nova.compute.manager [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 814.762773] env[61629]: DEBUG nova.compute.manager [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 814.762773] env[61629]: DEBUG nova.network.neutron [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 814.778732] env[61629]: DEBUG nova.network.neutron [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 814.917676] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117cc5e2-0196-46aa-8d42-1f5d4dd29b64 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.924955] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-693a809a-76f6-45bb-a0f3-04bf39af8cb0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.953714] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec97ec3-972c-4b7a-9d1e-db3f69728874 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.960684] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9021f6-0afb-4670-81c3-b913a3d4777e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.974359] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Releasing lock "refresh_cache-c5c6854c-1fe6-46e7-aee7-6a5e00d6027c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.974766] env[61629]: DEBUG nova.compute.manager [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 814.974953] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 814.975394] env[61629]: DEBUG nova.compute.provider_tree [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 814.976592] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2b407311-eda2-40e8-92fc-78f3ef03cb80 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.984628] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df01f8e-8541-4176-9772-7000de7956f2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.005214] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c5c6854c-1fe6-46e7-aee7-6a5e00d6027c could not be found. 
[ 815.005367] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 815.005567] env[61629]: INFO nova.compute.manager [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Took 0.03 seconds to destroy the instance on the hypervisor. [ 815.005808] env[61629]: DEBUG oslo.service.loopingcall [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 815.006023] env[61629]: DEBUG nova.compute.manager [-] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 815.006119] env[61629]: DEBUG nova.network.neutron [-] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 815.020038] env[61629]: DEBUG nova.network.neutron [-] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 815.162964] env[61629]: DEBUG nova.compute.manager [req-b2fa908a-e18a-4e82-87f2-1df1da44aab4 req-e52929e1-03e3-4e24-b865-5a7b91c54ec5 service nova] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Received event network-changed-bc671ae6-2cb4-46b5-8289-516f2007bc6b {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 815.163203] env[61629]: DEBUG nova.compute.manager [req-b2fa908a-e18a-4e82-87f2-1df1da44aab4 req-e52929e1-03e3-4e24-b865-5a7b91c54ec5 service nova] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Refreshing instance network info cache due to event network-changed-bc671ae6-2cb4-46b5-8289-516f2007bc6b. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 815.163422] env[61629]: DEBUG oslo_concurrency.lockutils [req-b2fa908a-e18a-4e82-87f2-1df1da44aab4 req-e52929e1-03e3-4e24-b865-5a7b91c54ec5 service nova] Acquiring lock "refresh_cache-c5c6854c-1fe6-46e7-aee7-6a5e00d6027c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.163580] env[61629]: DEBUG oslo_concurrency.lockutils [req-b2fa908a-e18a-4e82-87f2-1df1da44aab4 req-e52929e1-03e3-4e24-b865-5a7b91c54ec5 service nova] Acquired lock "refresh_cache-c5c6854c-1fe6-46e7-aee7-6a5e00d6027c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.163932] env[61629]: DEBUG nova.network.neutron [req-b2fa908a-e18a-4e82-87f2-1df1da44aab4 req-e52929e1-03e3-4e24-b865-5a7b91c54ec5 service nova] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Refreshing network info cache for port bc671ae6-2cb4-46b5-8289-516f2007bc6b {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 815.281250] env[61629]: DEBUG nova.network.neutron [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.479835] env[61629]: DEBUG nova.scheduler.client.report [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 815.521959] env[61629]: DEBUG nova.network.neutron [-] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.682422] env[61629]: DEBUG nova.network.neutron [req-b2fa908a-e18a-4e82-87f2-1df1da44aab4 req-e52929e1-03e3-4e24-b865-5a7b91c54ec5 service nova] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 815.766218] env[61629]: DEBUG nova.network.neutron [req-b2fa908a-e18a-4e82-87f2-1df1da44aab4 req-e52929e1-03e3-4e24-b865-5a7b91c54ec5 service nova] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.783544] env[61629]: INFO nova.compute.manager [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] [instance: c3724b2e-4f6b-4db5-a68f-41e410e561e9] Took 1.02 seconds to deallocate network for instance. 
[ 815.986949] env[61629]: DEBUG oslo_concurrency.lockutils [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.350s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.986949] env[61629]: DEBUG nova.compute.manager [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 815.988711] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.406s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.990358] env[61629]: INFO nova.compute.claims [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 816.024625] env[61629]: INFO nova.compute.manager [-] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Took 1.02 seconds to deallocate network for instance. [ 816.027540] env[61629]: DEBUG nova.compute.claims [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 816.027947] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.269697] env[61629]: DEBUG oslo_concurrency.lockutils [req-b2fa908a-e18a-4e82-87f2-1df1da44aab4 req-e52929e1-03e3-4e24-b865-5a7b91c54ec5 service nova] Releasing lock "refresh_cache-c5c6854c-1fe6-46e7-aee7-6a5e00d6027c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.269951] env[61629]: DEBUG nova.compute.manager [req-b2fa908a-e18a-4e82-87f2-1df1da44aab4 req-e52929e1-03e3-4e24-b865-5a7b91c54ec5 service nova] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Received event network-vif-deleted-bc671ae6-2cb4-46b5-8289-516f2007bc6b {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 816.498823] env[61629]: DEBUG nova.compute.utils [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 816.499175] env[61629]: DEBUG nova.compute.manager [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 
tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Not allocating networking since 'none' was specified. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 816.810656] env[61629]: INFO nova.scheduler.client.report [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Deleted allocations for instance c3724b2e-4f6b-4db5-a68f-41e410e561e9 [ 817.002884] env[61629]: DEBUG nova.compute.manager [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 817.273521] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2fd2ecd-2740-44ad-b2dc-a0c1ae4e9a94 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.281865] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba599513-7b5d-4502-8c01-7514be38011d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.313426] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c276ae80-d6e7-4001-811c-d3b1dbeca1d1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.320199] env[61629]: DEBUG oslo_concurrency.lockutils [None req-147f73a0-2005-477f-ae69-e84d4df57b2c tempest-FloatingIPsAssociationNegativeTestJSON-1408732935 tempest-FloatingIPsAssociationNegativeTestJSON-1408732935-project-member] Lock "c3724b2e-4f6b-4db5-a68f-41e410e561e9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 166.473s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.322609] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a6e912-14f2-4929-af3b-1aa863d00e12 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.337642] env[61629]: DEBUG nova.compute.provider_tree [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.828368] env[61629]: DEBUG nova.compute.manager [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Starting instance... 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 817.841660] env[61629]: DEBUG nova.scheduler.client.report [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 818.012714] env[61629]: DEBUG nova.compute.manager [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 818.049437] env[61629]: DEBUG nova.virt.hardware [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 818.049665] env[61629]: DEBUG nova.virt.hardware [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 818.049813] env[61629]: DEBUG nova.virt.hardware [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 818.049985] env[61629]: DEBUG nova.virt.hardware [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 818.050154] env[61629]: DEBUG nova.virt.hardware [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 818.050310] env[61629]: DEBUG nova.virt.hardware [None 
req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 818.050470] env[61629]: DEBUG nova.virt.hardware [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 818.050619] env[61629]: DEBUG nova.virt.hardware [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 818.050775] env[61629]: DEBUG nova.virt.hardware [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 818.050926] env[61629]: DEBUG nova.virt.hardware [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 818.051238] env[61629]: DEBUG nova.virt.hardware [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 818.052146] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc9514e-9b6c-44da-b55c-2ab6c9632d36 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.060738] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57225f54-c24a-4432-8105-d9f19adcc976 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.076257] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Instance VIF info [] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 818.082220] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Creating folder: Project (d1f31fbb07854b7d9baa4b910928c902). Parent ref: group-v288443. 
{{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 818.082506] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-889da50b-366f-4282-9461-bd99bf4e9706 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.093459] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Created folder: Project (d1f31fbb07854b7d9baa4b910928c902) in parent group-v288443. [ 818.093650] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Creating folder: Instances. Parent ref: group-v288469. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 818.093878] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4e929b7-4813-4be3-ac48-8c74b943f43f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.103875] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Created folder: Instances in parent group-v288469. [ 818.104120] env[61629]: DEBUG oslo.service.loopingcall [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 818.104385] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 818.104502] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fec80057-1741-4605-9317-b10a2a4b5e74 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.122196] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 818.122196] env[61629]: value = "task-1354059" [ 818.122196] env[61629]: _type = "Task" [ 818.122196] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.129521] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354059, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.349397] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.360s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.350015] env[61629]: DEBUG nova.compute.manager [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 818.354923] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.742s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.354923] env[61629]: INFO nova.compute.claims [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 818.358412] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.632358] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354059, 'name': CreateVM_Task, 'duration_secs': 0.260563} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.632592] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 818.633109] env[61629]: DEBUG oslo_concurrency.lockutils [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.633858] env[61629]: DEBUG oslo_concurrency.lockutils [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.633858] env[61629]: DEBUG oslo_concurrency.lockutils [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 818.633985] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1801e6fc-c2c0-49b6-b2d4-baf4740c344d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.639834] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Waiting for the task: (returnval){ [ 818.639834] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5201d2b1-e31a-cf99-d61a-2b6e78b7c569" [ 818.639834] env[61629]: _type = "Task" [ 818.639834] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.648613] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5201d2b1-e31a-cf99-d61a-2b6e78b7c569, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.860235] env[61629]: DEBUG nova.compute.utils [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 818.863919] env[61629]: DEBUG nova.compute.manager [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 818.864533] env[61629]: DEBUG nova.network.neutron [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 818.911405] env[61629]: DEBUG nova.policy [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec39705b9dd24915a0b3723ea45a85d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38efdd2cc07f45a49fb06d590aafb96b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 819.149438] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5201d2b1-e31a-cf99-d61a-2b6e78b7c569, 'name': SearchDatastore_Task, 'duration_secs': 0.012704} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.149886] env[61629]: DEBUG oslo_concurrency.lockutils [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.150260] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 819.150711] env[61629]: DEBUG oslo_concurrency.lockutils [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.151583] env[61629]: DEBUG oslo_concurrency.lockutils [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.151923] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Creating directory with path 
[datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 819.152299] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0e82aa62-511e-4fd2-8a01-f15753bdbbff {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.163409] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 819.163774] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 819.164582] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d32909cc-eda4-459f-9568-f55c436e74f3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.169814] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Waiting for the task: (returnval){ [ 819.169814] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52763240-27fb-ace9-6eac-4f245bdd2e8f" [ 819.169814] env[61629]: _type = "Task" [ 819.169814] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.178790] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52763240-27fb-ace9-6eac-4f245bdd2e8f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.218428] env[61629]: DEBUG nova.network.neutron [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Successfully created port: 4f397c05-0f7d-4b83-a52e-ffc278afadf8 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 819.367628] env[61629]: DEBUG nova.compute.manager [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 819.686821] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52763240-27fb-ace9-6eac-4f245bdd2e8f, 'name': SearchDatastore_Task, 'duration_secs': 0.008117} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.687662] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26e6efc8-9f97-43a4-bf34-d2eda6c26036 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.696243] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Waiting for the task: (returnval){ [ 819.696243] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52dff780-3095-4ac4-2193-7c1fda0326d0" [ 819.696243] env[61629]: _type = "Task" [ 819.696243] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.702362] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109f1a1e-427f-4504-926e-5e221407d8fe {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.708597] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52dff780-3095-4ac4-2193-7c1fda0326d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.712723] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4de5826-37f4-4261-ae66-083fecfe95de {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.745837] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0fda17e-2be1-4892-bc58-d3e1ee5d0a7e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.753318] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99670e7-b531-46a6-a486-f759f6d88d10 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.767104] env[61629]: DEBUG nova.compute.provider_tree [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 819.996528] env[61629]: DEBUG nova.compute.manager [req-b036809e-1911-4b1b-98fe-5fe39ad5436d req-e00e361f-a4ad-44a4-bd5e-8110d696b601 service nova] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Received event network-changed-4f397c05-0f7d-4b83-a52e-ffc278afadf8 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 819.996773] env[61629]: DEBUG nova.compute.manager [req-b036809e-1911-4b1b-98fe-5fe39ad5436d req-e00e361f-a4ad-44a4-bd5e-8110d696b601 service nova] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Refreshing instance network info cache due to event network-changed-4f397c05-0f7d-4b83-a52e-ffc278afadf8. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 819.996941] env[61629]: DEBUG oslo_concurrency.lockutils [req-b036809e-1911-4b1b-98fe-5fe39ad5436d req-e00e361f-a4ad-44a4-bd5e-8110d696b601 service nova] Acquiring lock "refresh_cache-76f08ac6-bb83-4d61-9707-b602028c54f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.997139] env[61629]: DEBUG oslo_concurrency.lockutils [req-b036809e-1911-4b1b-98fe-5fe39ad5436d req-e00e361f-a4ad-44a4-bd5e-8110d696b601 service nova] Acquired lock "refresh_cache-76f08ac6-bb83-4d61-9707-b602028c54f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.997337] env[61629]: DEBUG nova.network.neutron [req-b036809e-1911-4b1b-98fe-5fe39ad5436d req-e00e361f-a4ad-44a4-bd5e-8110d696b601 service nova] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Refreshing network info cache for port 4f397c05-0f7d-4b83-a52e-ffc278afadf8 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 820.210027] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52dff780-3095-4ac4-2193-7c1fda0326d0, 'name': SearchDatastore_Task, 'duration_secs': 0.008918} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.210139] env[61629]: DEBUG oslo_concurrency.lockutils [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.210395] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] b8cfaef2-5f78-4026-90b8-fe2adacd61e0/b8cfaef2-5f78-4026-90b8-fe2adacd61e0.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 820.210690] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d80c8ff-1d59-4ce7-86f2-48c859bd8bcf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.218423] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Waiting for the task: (returnval){ [ 820.218423] env[61629]: value = "task-1354060" [ 820.218423] env[61629]: _type = "Task" [ 820.218423] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.226137] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Task: {'id': task-1354060, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.249024] env[61629]: ERROR nova.compute.manager [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4f397c05-0f7d-4b83-a52e-ffc278afadf8, please check neutron logs for more information. [ 820.249024] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 820.249024] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 820.249024] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 820.249024] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 820.249024] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 820.249024] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 820.249024] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 820.249024] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 820.249024] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 820.249024] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 820.249024] env[61629]: ERROR nova.compute.manager raise self.value [ 820.249024] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 820.249024] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 820.249024] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 820.249024] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 820.249499] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 820.249499] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 820.249499] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4f397c05-0f7d-4b83-a52e-ffc278afadf8, please check neutron logs for more information. 
[ 820.249499] env[61629]: ERROR nova.compute.manager [ 820.249499] env[61629]: Traceback (most recent call last): [ 820.249499] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 820.249499] env[61629]: listener.cb(fileno) [ 820.249499] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 820.249499] env[61629]: result = function(*args, **kwargs) [ 820.249499] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 820.249499] env[61629]: return func(*args, **kwargs) [ 820.249499] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 820.249499] env[61629]: raise e [ 820.249499] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 820.249499] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 820.249499] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 820.249499] env[61629]: created_port_ids = self._update_ports_for_instance( [ 820.249499] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 820.249499] env[61629]: with excutils.save_and_reraise_exception(): [ 820.249499] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 820.249499] env[61629]: self.force_reraise() [ 820.249499] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 820.249499] env[61629]: raise self.value [ 820.249499] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 820.249499] env[61629]: updated_port = self._update_port( [ 820.249499] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 820.249499] env[61629]: _ensure_no_port_binding_failure(port) [ 820.249499] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 820.249499] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 820.250267] env[61629]: nova.exception.PortBindingFailed: Binding failed for port 4f397c05-0f7d-4b83-a52e-ffc278afadf8, please check neutron logs for more information. [ 820.250267] env[61629]: Removing descriptor: 21 [ 820.268893] env[61629]: DEBUG nova.scheduler.client.report [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 820.382068] env[61629]: DEBUG nova.compute.manager [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 820.409979] env[61629]: DEBUG nova.virt.hardware [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 820.410273] env[61629]: DEBUG nova.virt.hardware [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 820.410428] env[61629]: DEBUG nova.virt.hardware [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 820.410607] env[61629]: DEBUG nova.virt.hardware [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 820.410752] env[61629]: DEBUG nova.virt.hardware [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 820.410897] env[61629]: DEBUG nova.virt.hardware [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 820.411119] env[61629]: DEBUG nova.virt.hardware [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 820.411278] env[61629]: DEBUG nova.virt.hardware [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 820.411445] env[61629]: DEBUG nova.virt.hardware [None 
req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 820.411606] env[61629]: DEBUG nova.virt.hardware [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 820.411777] env[61629]: DEBUG nova.virt.hardware [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 820.412715] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d68e33a-f53f-415d-9fef-963e83ab42a3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.420606] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a300d68-55ed-4214-8d07-6cebe28801db {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.434942] env[61629]: ERROR nova.compute.manager [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4f397c05-0f7d-4b83-a52e-ffc278afadf8, please check neutron logs for more information. 
[ 820.434942] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Traceback (most recent call last): [ 820.434942] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 820.434942] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] yield resources [ 820.434942] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 820.434942] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] self.driver.spawn(context, instance, image_meta, [ 820.434942] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 820.434942] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 820.434942] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 820.434942] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] vm_ref = self.build_virtual_machine(instance, [ 820.434942] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 820.435344] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] vif_infos = vmwarevif.get_vif_info(self._session, [ 820.435344] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 820.435344] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] for vif in network_info: [ 820.435344] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 820.435344] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] return self._sync_wrapper(fn, *args, **kwargs) [ 820.435344] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 820.435344] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] self.wait() [ 820.435344] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 820.435344] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] self[:] = self._gt.wait() [ 820.435344] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 820.435344] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] return self._exit_event.wait() [ 820.435344] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 820.435344] env[61629]: ERROR 
nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] current.throw(*self._exc) [ 820.435658] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 820.435658] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] result = function(*args, **kwargs) [ 820.435658] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 820.435658] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] return func(*args, **kwargs) [ 820.435658] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 820.435658] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] raise e [ 820.435658] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 820.435658] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] nwinfo = self.network_api.allocate_for_instance( [ 820.435658] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 820.435658] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] created_port_ids = self._update_ports_for_instance( [ 820.435658] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 820.435658] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] with excutils.save_and_reraise_exception(): [ 820.435658] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 820.436000] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] self.force_reraise() [ 820.436000] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 820.436000] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] raise self.value [ 820.436000] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 820.436000] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] updated_port = self._update_port( [ 820.436000] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 820.436000] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] _ensure_no_port_binding_failure(port) [ 820.436000] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
820.436000] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] raise exception.PortBindingFailed(port_id=port['id']) [ 820.436000] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] nova.exception.PortBindingFailed: Binding failed for port 4f397c05-0f7d-4b83-a52e-ffc278afadf8, please check neutron logs for more information. [ 820.436000] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] [ 820.436000] env[61629]: INFO nova.compute.manager [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Terminating instance [ 820.437463] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "refresh_cache-76f08ac6-bb83-4d61-9707-b602028c54f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.517093] env[61629]: DEBUG nova.network.neutron [req-b036809e-1911-4b1b-98fe-5fe39ad5436d req-e00e361f-a4ad-44a4-bd5e-8110d696b601 service nova] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 820.609117] env[61629]: DEBUG nova.network.neutron [req-b036809e-1911-4b1b-98fe-5fe39ad5436d req-e00e361f-a4ad-44a4-bd5e-8110d696b601 service nova] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.729364] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Task: {'id': task-1354060, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471085} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.729712] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] b8cfaef2-5f78-4026-90b8-fe2adacd61e0/b8cfaef2-5f78-4026-90b8-fe2adacd61e0.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 820.729934] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 820.730205] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ff0fa481-f709-4a94-8e25-1579b1701980 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.736306] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Waiting for the task: (returnval){ [ 820.736306] env[61629]: value = "task-1354061" [ 820.736306] env[61629]: _type = "Task" [ 820.736306] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.743490] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Task: {'id': task-1354061, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.775060] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.775575] env[61629]: DEBUG nova.compute.manager [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 820.778323] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.774s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.779708] env[61629]: INFO nova.compute.claims [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 821.113124] env[61629]: DEBUG oslo_concurrency.lockutils [req-b036809e-1911-4b1b-98fe-5fe39ad5436d req-e00e361f-a4ad-44a4-bd5e-8110d696b601 service nova] Releasing lock "refresh_cache-76f08ac6-bb83-4d61-9707-b602028c54f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.113124] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired lock "refresh_cache-76f08ac6-bb83-4d61-9707-b602028c54f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.113124] env[61629]: DEBUG nova.network.neutron [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 821.246225] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Task: {'id': task-1354061, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.05541} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.246569] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 821.247456] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c27e21-715f-415a-8023-bba7741fbbc6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.268013] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] b8cfaef2-5f78-4026-90b8-fe2adacd61e0/b8cfaef2-5f78-4026-90b8-fe2adacd61e0.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 821.268013] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7b01eb2-54e2-40d0-ae8b-5456a09b20a1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.283606] env[61629]: DEBUG nova.compute.utils [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 821.287414] env[61629]: DEBUG nova.compute.manager [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 821.287955] env[61629]: DEBUG nova.network.neutron [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 821.290895] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Waiting for the task: (returnval){ [ 821.290895] env[61629]: value = "task-1354062" [ 821.290895] env[61629]: _type = "Task" [ 821.290895] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.302828] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Task: {'id': task-1354062, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.338854] env[61629]: DEBUG nova.policy [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bdffb48ef3e14d7994bb9709b1ce3987', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a35cec60cf464a1c9f8215dbc6403a84', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 821.631104] env[61629]: DEBUG nova.network.neutron [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 821.650787] env[61629]: DEBUG nova.network.neutron [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Successfully created port: cb3c3178-0348-4ebc-9069-c2307b76759c {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 821.732994] env[61629]: DEBUG nova.network.neutron [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.786686] env[61629]: DEBUG nova.compute.manager [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 821.801510] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Task: {'id': task-1354062, 'name': ReconfigVM_Task, 'duration_secs': 0.293315} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.801839] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Reconfigured VM instance instance-0000003b to attach disk [datastore2] b8cfaef2-5f78-4026-90b8-fe2adacd61e0/b8cfaef2-5f78-4026-90b8-fe2adacd61e0.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 821.802552] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17dbe8b7-f49d-4556-a053-661d603c4cc0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.810033] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Waiting for the task: (returnval){ [ 821.810033] env[61629]: value = "task-1354063" [ 821.810033] env[61629]: _type = "Task" [ 821.810033] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.821581] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Task: {'id': task-1354063, 'name': Rename_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.027013] env[61629]: DEBUG nova.compute.manager [req-b90af471-6397-4bfc-b571-71ab76b125bb req-87091edd-528e-43d9-b910-4412e8c2c806 service nova] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Received event network-vif-deleted-4f397c05-0f7d-4b83-a52e-ffc278afadf8 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 822.062962] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29af8fd7-4ee5-41e5-9bef-707b5c872829 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.070725] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f48ba488-5db2-4276-98ee-72bcf53e6624 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.101243] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6455c51-c752-47b6-9aac-7fb967ddc170 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.108166] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc57738c-09cd-48ea-8858-0bcd18d1d5fd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.125048] env[61629]: DEBUG nova.compute.provider_tree [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 822.236567] env[61629]: 
DEBUG oslo_concurrency.lockutils [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Releasing lock "refresh_cache-76f08ac6-bb83-4d61-9707-b602028c54f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.238530] env[61629]: DEBUG nova.compute.manager [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 822.238772] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 822.239116] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-abc61eea-109b-42c8-904f-ac2c0d6aac00 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.248479] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0fcac3-9a06-40c3-bdfb-67b841bb3989 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.271492] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 76f08ac6-bb83-4d61-9707-b602028c54f2 could not be found. [ 822.271717] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 822.271977] env[61629]: INFO nova.compute.manager [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Took 0.03 seconds to destroy the instance on the hypervisor. [ 822.272276] env[61629]: DEBUG oslo.service.loopingcall [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 822.272509] env[61629]: DEBUG nova.compute.manager [-] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 822.272605] env[61629]: DEBUG nova.network.neutron [-] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 822.288943] env[61629]: DEBUG nova.network.neutron [-] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 822.322846] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Task: {'id': task-1354063, 'name': Rename_Task, 'duration_secs': 0.123799} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.322846] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 822.323018] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f79f225e-9e8b-4acd-b974-18bedbd10bd4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.329040] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Waiting for the task: (returnval){ [ 822.329040] env[61629]: value = "task-1354064" [ 822.329040] env[61629]: _type = "Task" [ 822.329040] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.338253] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Task: {'id': task-1354064, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.620115] env[61629]: ERROR nova.compute.manager [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port cb3c3178-0348-4ebc-9069-c2307b76759c, please check neutron logs for more information. 
[ 822.620115] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 822.620115] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 822.620115] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 822.620115] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 822.620115] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 822.620115] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 822.620115] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 822.620115] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 822.620115] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 822.620115] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 822.620115] env[61629]: ERROR nova.compute.manager raise self.value [ 822.620115] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 822.620115] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 822.620115] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 822.620115] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 822.620600] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 822.620600] env[61629]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 822.620600] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port cb3c3178-0348-4ebc-9069-c2307b76759c, please check neutron logs for more information. 
[ 822.620600] env[61629]: ERROR nova.compute.manager [ 822.620712] env[61629]: Traceback (most recent call last): [ 822.620770] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 822.620770] env[61629]: listener.cb(fileno) [ 822.620770] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 822.620770] env[61629]: result = function(*args, **kwargs) [ 822.620770] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 822.620770] env[61629]: return func(*args, **kwargs) [ 822.620770] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 822.620770] env[61629]: raise e [ 822.620770] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 822.620770] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 822.621019] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 822.621019] env[61629]: created_port_ids = self._update_ports_for_instance( [ 822.621019] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 822.621019] env[61629]: with excutils.save_and_reraise_exception(): [ 822.621019] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 822.621019] env[61629]: self.force_reraise() [ 822.621019] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 822.621019] env[61629]: raise self.value [ 822.621019] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 822.621019] env[61629]: updated_port = self._update_port( [ 822.621019] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 822.621019] env[61629]: _ensure_no_port_binding_failure(port) [ 822.621019] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 822.621019] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 822.621019] env[61629]: nova.exception.PortBindingFailed: Binding failed for port cb3c3178-0348-4ebc-9069-c2307b76759c, please check neutron logs for more information. 
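The two tracebacks above both terminate in Nova's port-binding check: after Neutron returns the updated port, the binding result is inspected and PortBindingFailed is raised, which aborts _allocate_network_async. Below is a minimal, self-contained sketch of that style of check; it is illustrative only (the 'binding_failed' value and the exception message are taken from the log, everything else is an assumption and not the actual nova/network/neutron.py source):

    # Illustrative sketch only -- not the Nova source. It mirrors the check the
    # traceback above walks through: after Neutron updates a port, the binding
    # result is inspected and PortBindingFailed is raised if binding failed.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # value Neutron reports for a failed binding

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check neutron "
                             "logs for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # 'binding:vif_type' carries the outcome of the binding attempt on the host.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure({'id': 'cb3c3178-0348-4ebc-9069-c2307b76759c',
                                        'binding:vif_type': VIF_TYPE_BINDING_FAILED})
    except PortBindingFailed as exc:
        print(exc)  # same message that appears in the traceback above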
[ 822.621019] env[61629]: Removing descriptor: 21 [ 822.632099] env[61629]: DEBUG nova.scheduler.client.report [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 822.792133] env[61629]: DEBUG nova.network.neutron [-] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.796604] env[61629]: DEBUG nova.compute.manager [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 822.821146] env[61629]: DEBUG nova.virt.hardware [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 822.821410] env[61629]: DEBUG nova.virt.hardware [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 822.821566] env[61629]: DEBUG nova.virt.hardware [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 822.821831] env[61629]: DEBUG nova.virt.hardware [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 822.821981] env[61629]: DEBUG nova.virt.hardware [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 
tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 822.822163] env[61629]: DEBUG nova.virt.hardware [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 822.822429] env[61629]: DEBUG nova.virt.hardware [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 822.822593] env[61629]: DEBUG nova.virt.hardware [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 822.822761] env[61629]: DEBUG nova.virt.hardware [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 822.822921] env[61629]: DEBUG nova.virt.hardware [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 822.823105] env[61629]: DEBUG nova.virt.hardware [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 822.824184] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3cde421-9f62-4d73-af8c-c9653c13eeef {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.834899] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d6933f-347e-435e-87b3-987cc198ab00 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.843139] env[61629]: DEBUG oslo_vmware.api [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Task: {'id': task-1354064, 'name': PowerOnVM_Task, 'duration_secs': 0.388093} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.850598] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 822.850808] env[61629]: INFO nova.compute.manager [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Took 4.84 seconds to spawn the instance on the hypervisor. [ 822.850993] env[61629]: DEBUG nova.compute.manager [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 822.851911] env[61629]: ERROR nova.compute.manager [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port cb3c3178-0348-4ebc-9069-c2307b76759c, please check neutron logs for more information. [ 822.851911] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Traceback (most recent call last): [ 822.851911] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 822.851911] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] yield resources [ 822.851911] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 822.851911] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] self.driver.spawn(context, instance, image_meta, [ 822.851911] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 822.851911] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 822.851911] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 822.851911] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] vm_ref = self.build_virtual_machine(instance, [ 822.851911] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 822.852235] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] vif_infos = vmwarevif.get_vif_info(self._session, [ 822.852235] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 822.852235] env[61629]: ERROR nova.compute.manager [instance: 
3cabd3ef-590a-41f3-a611-3d27b4853db5] for vif in network_info: [ 822.852235] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 822.852235] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] return self._sync_wrapper(fn, *args, **kwargs) [ 822.852235] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 822.852235] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] self.wait() [ 822.852235] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 822.852235] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] self[:] = self._gt.wait() [ 822.852235] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 822.852235] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] return self._exit_event.wait() [ 822.852235] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 822.852235] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] current.throw(*self._exc) [ 822.852578] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 822.852578] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] result = function(*args, **kwargs) [ 822.852578] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 822.852578] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] return func(*args, **kwargs) [ 822.852578] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 822.852578] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] raise e [ 822.852578] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 822.852578] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] nwinfo = self.network_api.allocate_for_instance( [ 822.852578] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 822.852578] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] created_port_ids = self._update_ports_for_instance( [ 822.852578] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 822.852578] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] with 
excutils.save_and_reraise_exception(): [ 822.852578] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 822.852931] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] self.force_reraise() [ 822.852931] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 822.852931] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] raise self.value [ 822.852931] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 822.852931] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] updated_port = self._update_port( [ 822.852931] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 822.852931] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] _ensure_no_port_binding_failure(port) [ 822.852931] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 822.852931] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] raise exception.PortBindingFailed(port_id=port['id']) [ 822.852931] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] nova.exception.PortBindingFailed: Binding failed for port cb3c3178-0348-4ebc-9069-c2307b76759c, please check neutron logs for more information. 
[ 822.852931] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] [ 822.852931] env[61629]: INFO nova.compute.manager [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Terminating instance [ 822.854142] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c64fba1-d5ab-4c0e-8c5d-5428ccdc0126 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.856660] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquiring lock "refresh_cache-3cabd3ef-590a-41f3-a611-3d27b4853db5" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.856822] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquired lock "refresh_cache-3cabd3ef-590a-41f3-a611-3d27b4853db5" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.856984] env[61629]: DEBUG nova.network.neutron [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 823.137014] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.359s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.137527] env[61629]: DEBUG nova.compute.manager [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 823.140077] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.812s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.294832] env[61629]: INFO nova.compute.manager [-] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Took 1.02 seconds to deallocate network for instance. 
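The Acquiring/Acquired/Releasing lock lines around "refresh_cache-3cabd3ef-590a-41f3-a611-3d27b4853db5" and "compute_resources" come from oslo.concurrency's lockutils helpers, which record how long each caller waited for and held a named lock. A minimal usage sketch of the two common patterns follows; the lock names are taken from the log, the function bodies are placeholders, and this is not Nova code:

    # Illustrative use of oslo.concurrency locking (pip install oslo.concurrency).
    # With debug logging enabled, these helpers emit the "Acquiring lock ...",
    # "Lock ... acquired ... waited Ns" and "... released ... held Ns" lines
    # seen throughout this log.
    from oslo_concurrency import lockutils

    # Decorator form: every call sharing the lock name is serialized.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass  # resource accounting would happen here

    # Context-manager form: ad-hoc critical sections such as the per-instance
    # "refresh_cache-<uuid>" locks used while rebuilding the network info cache.
    def refresh_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild instance_info_cache here

    instance_claim()
    refresh_cache('3cabd3ef-590a-41f3-a611-3d27b4853db5')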
[ 823.297356] env[61629]: DEBUG nova.compute.claims [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 823.297526] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.371989] env[61629]: INFO nova.compute.manager [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Took 35.93 seconds to build instance. [ 823.375679] env[61629]: DEBUG nova.network.neutron [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 823.473650] env[61629]: DEBUG nova.network.neutron [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.649237] env[61629]: DEBUG nova.compute.utils [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 823.651455] env[61629]: DEBUG nova.compute.manager [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 823.652079] env[61629]: DEBUG nova.network.neutron [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 823.706085] env[61629]: DEBUG nova.policy [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be81178f7a914988a54581c283e2e76a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6d1f876ee054beb89ca0eb0776ddcd5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 823.874870] env[61629]: DEBUG oslo_concurrency.lockutils [None req-32c9aedf-7862-4d66-855a-ca8d9ec98b07 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Lock "b8cfaef2-5f78-4026-90b8-fe2adacd61e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 110.710s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.923073] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9cacb8d-9cef-48e2-bedf-1805d75be63d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.931389] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a931a6-9f13-41c4-bdea-a68dd1f36594 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.962802] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d656e80-ffc1-44ee-b536-b903d95dc283 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.970107] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34adfb23-8f2e-43f1-8aff-d23929f3cf63 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.982775] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Releasing lock "refresh_cache-3cabd3ef-590a-41f3-a611-3d27b4853db5" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.983215] env[61629]: DEBUG nova.compute.manager [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 823.983425] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 823.983887] env[61629]: DEBUG nova.compute.provider_tree [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 823.985212] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e68c9ae8-19eb-440d-826d-cccb12be1ad1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.994547] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0882e1d6-5c88-4843-af52-0fcf4c75a491 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.007548] env[61629]: DEBUG nova.network.neutron [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Successfully created port: cd21a5cd-a27c-454a-ac3b-e137434d322b {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 824.021028] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3cabd3ef-590a-41f3-a611-3d27b4853db5 could not be found. [ 824.021166] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 824.021352] env[61629]: INFO nova.compute.manager [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 824.021593] env[61629]: DEBUG oslo.service.loopingcall [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 824.021815] env[61629]: DEBUG nova.compute.manager [-] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 824.021910] env[61629]: DEBUG nova.network.neutron [-] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 824.036930] env[61629]: DEBUG nova.network.neutron [-] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 824.054098] env[61629]: DEBUG nova.compute.manager [req-d20ad6f9-6f34-46b3-9703-36cd93a4cca9 req-28e5cdcc-bf64-4292-9cee-94cca139891e service nova] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Received event network-changed-cb3c3178-0348-4ebc-9069-c2307b76759c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 824.054619] env[61629]: DEBUG nova.compute.manager [req-d20ad6f9-6f34-46b3-9703-36cd93a4cca9 req-28e5cdcc-bf64-4292-9cee-94cca139891e service nova] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Refreshing instance network info cache due to event network-changed-cb3c3178-0348-4ebc-9069-c2307b76759c. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 824.054906] env[61629]: DEBUG oslo_concurrency.lockutils [req-d20ad6f9-6f34-46b3-9703-36cd93a4cca9 req-28e5cdcc-bf64-4292-9cee-94cca139891e service nova] Acquiring lock "refresh_cache-3cabd3ef-590a-41f3-a611-3d27b4853db5" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 824.055227] env[61629]: DEBUG oslo_concurrency.lockutils [req-d20ad6f9-6f34-46b3-9703-36cd93a4cca9 req-28e5cdcc-bf64-4292-9cee-94cca139891e service nova] Acquired lock "refresh_cache-3cabd3ef-590a-41f3-a611-3d27b4853db5" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.055426] env[61629]: DEBUG nova.network.neutron [req-d20ad6f9-6f34-46b3-9703-36cd93a4cca9 req-28e5cdcc-bf64-4292-9cee-94cca139891e service nova] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Refreshing network info cache for port cb3c3178-0348-4ebc-9069-c2307b76759c {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 824.075550] env[61629]: DEBUG nova.compute.manager [None req-16ea19b9-1b03-4e2e-b2bb-94368d4dcb03 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 824.077258] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df34dd29-7fad-47d6-ac99-6f6dbe08524d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.134765] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Acquiring lock "b8cfaef2-5f78-4026-90b8-fe2adacd61e0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.135058] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Lock "b8cfaef2-5f78-4026-90b8-fe2adacd61e0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.135272] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Acquiring lock "b8cfaef2-5f78-4026-90b8-fe2adacd61e0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.135455] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Lock "b8cfaef2-5f78-4026-90b8-fe2adacd61e0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.135624] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Lock "b8cfaef2-5f78-4026-90b8-fe2adacd61e0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.137781] env[61629]: INFO nova.compute.manager [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Terminating instance [ 824.139432] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Acquiring lock "refresh_cache-b8cfaef2-5f78-4026-90b8-fe2adacd61e0" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 824.139591] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Acquired lock "refresh_cache-b8cfaef2-5f78-4026-90b8-fe2adacd61e0" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.139755] env[61629]: DEBUG nova.network.neutron [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 824.152627] env[61629]: DEBUG nova.compute.manager [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 
c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 824.376283] env[61629]: DEBUG nova.compute.manager [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 824.488894] env[61629]: DEBUG nova.scheduler.client.report [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 824.541366] env[61629]: DEBUG nova.network.neutron [-] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.574143] env[61629]: DEBUG nova.network.neutron [req-d20ad6f9-6f34-46b3-9703-36cd93a4cca9 req-28e5cdcc-bf64-4292-9cee-94cca139891e service nova] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 824.588358] env[61629]: INFO nova.compute.manager [None req-16ea19b9-1b03-4e2e-b2bb-94368d4dcb03 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] instance snapshotting [ 824.588358] env[61629]: DEBUG nova.objects.instance [None req-16ea19b9-1b03-4e2e-b2bb-94368d4dcb03 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Lazy-loading 'flavor' on Instance uuid b8cfaef2-5f78-4026-90b8-fe2adacd61e0 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 824.661239] env[61629]: DEBUG nova.network.neutron [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 824.670138] env[61629]: DEBUG nova.network.neutron [req-d20ad6f9-6f34-46b3-9703-36cd93a4cca9 req-28e5cdcc-bf64-4292-9cee-94cca139891e service nova] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.761809] env[61629]: DEBUG nova.network.neutron [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.899378] env[61629]: DEBUG oslo_concurrency.lockutils [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.969865] env[61629]: ERROR nova.compute.manager [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port cd21a5cd-a27c-454a-ac3b-e137434d322b, please check neutron logs for more information. [ 824.969865] env[61629]: ERROR nova.compute.manager Traceback (most recent call last): [ 824.969865] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 824.969865] env[61629]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 824.969865] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 824.969865] env[61629]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 824.969865] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 824.969865] env[61629]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 824.969865] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 824.969865] env[61629]: ERROR nova.compute.manager self.force_reraise() [ 824.969865] env[61629]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 824.969865] env[61629]: ERROR nova.compute.manager raise self.value [ 824.969865] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 824.969865] env[61629]: ERROR nova.compute.manager updated_port = self._update_port( [ 824.969865] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 824.969865] env[61629]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 824.970359] env[61629]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 824.970359] env[61629]: ERROR 
nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 824.970359] env[61629]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port cd21a5cd-a27c-454a-ac3b-e137434d322b, please check neutron logs for more information. [ 824.970359] env[61629]: ERROR nova.compute.manager [ 824.970359] env[61629]: Traceback (most recent call last): [ 824.970359] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 824.970359] env[61629]: listener.cb(fileno) [ 824.970359] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 824.970359] env[61629]: result = function(*args, **kwargs) [ 824.970359] env[61629]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 824.970359] env[61629]: return func(*args, **kwargs) [ 824.970359] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 824.970359] env[61629]: raise e [ 824.970359] env[61629]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 824.970359] env[61629]: nwinfo = self.network_api.allocate_for_instance( [ 824.970359] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 824.970359] env[61629]: created_port_ids = self._update_ports_for_instance( [ 824.970359] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 824.970359] env[61629]: with excutils.save_and_reraise_exception(): [ 824.970359] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 824.970359] env[61629]: self.force_reraise() [ 824.970359] env[61629]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 824.970359] env[61629]: raise self.value [ 824.970359] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 824.970359] env[61629]: updated_port = self._update_port( [ 824.970359] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 824.970359] env[61629]: _ensure_no_port_binding_failure(port) [ 824.970359] env[61629]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 824.970359] env[61629]: raise exception.PortBindingFailed(port_id=port['id']) [ 824.971137] env[61629]: nova.exception.PortBindingFailed: Binding failed for port cd21a5cd-a27c-454a-ac3b-e137434d322b, please check neutron logs for more information. 
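Separately, the "Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063" records above report totals, reservations and allocation ratios per resource class. Placement derives usable capacity as (total - reserved) * allocation_ratio; the short worked example below applies that formula to the figures from this log (the helper function is illustrative, not Placement code):

    # Worked example using the inventory reported above for provider
    # d075eff1-6f77-44a8-824e-16f3e03b4063. Usable capacity per resource class
    # is (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def usable(inv):
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    for rc, inv in inventory.items():
        print(rc, usable(inv))
    # VCPU 192.0       (48 host cores oversubscribed 4x)
    # MEMORY_MB 196078.0
    # DISK_GB 400.0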
[ 824.971137] env[61629]: Removing descriptor: 21 [ 824.994655] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.854s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.994793] env[61629]: ERROR nova.compute.manager [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e42b573e-58a2-49f8-ada0-d3ef74259470, please check neutron logs for more information. [ 824.994793] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Traceback (most recent call last): [ 824.994793] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 824.994793] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] self.driver.spawn(context, instance, image_meta, [ 824.994793] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 824.994793] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 824.994793] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 824.994793] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] vm_ref = self.build_virtual_machine(instance, [ 824.994793] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 824.994793] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] vif_infos = vmwarevif.get_vif_info(self._session, [ 824.994793] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 824.995145] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] for vif in network_info: [ 824.995145] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 824.995145] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] return self._sync_wrapper(fn, *args, **kwargs) [ 824.995145] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 824.995145] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] self.wait() [ 824.995145] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 824.995145] env[61629]: ERROR nova.compute.manager [instance: 
fe6adbf6-be78-45ee-a136-b7e538fb124b] self[:] = self._gt.wait() [ 824.995145] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 824.995145] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] return self._exit_event.wait() [ 824.995145] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 824.995145] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] current.throw(*self._exc) [ 824.995145] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 824.995145] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] result = function(*args, **kwargs) [ 824.995492] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 824.995492] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] return func(*args, **kwargs) [ 824.995492] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 824.995492] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] raise e [ 824.995492] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 824.995492] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] nwinfo = self.network_api.allocate_for_instance( [ 824.995492] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 824.995492] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] created_port_ids = self._update_ports_for_instance( [ 824.995492] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 824.995492] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] with excutils.save_and_reraise_exception(): [ 824.995492] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 824.995492] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] self.force_reraise() [ 824.995492] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 824.995859] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] raise self.value [ 824.995859] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 824.995859] env[61629]: 
ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] updated_port = self._update_port( [ 824.995859] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 824.995859] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] _ensure_no_port_binding_failure(port) [ 824.995859] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 824.995859] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] raise exception.PortBindingFailed(port_id=port['id']) [ 824.995859] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] nova.exception.PortBindingFailed: Binding failed for port e42b573e-58a2-49f8-ada0-d3ef74259470, please check neutron logs for more information. [ 824.995859] env[61629]: ERROR nova.compute.manager [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] [ 824.995859] env[61629]: DEBUG nova.compute.utils [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Binding failed for port e42b573e-58a2-49f8-ada0-d3ef74259470, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 824.996754] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.571s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.999565] env[61629]: DEBUG nova.compute.manager [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Build of instance fe6adbf6-be78-45ee-a136-b7e538fb124b was re-scheduled: Binding failed for port e42b573e-58a2-49f8-ada0-d3ef74259470, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 824.999959] env[61629]: DEBUG nova.compute.manager [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 825.000199] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Acquiring lock "refresh_cache-fe6adbf6-be78-45ee-a136-b7e538fb124b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.000342] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Acquired lock "refresh_cache-fe6adbf6-be78-45ee-a136-b7e538fb124b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.000497] env[61629]: DEBUG nova.network.neutron [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 825.042332] env[61629]: INFO nova.compute.manager [-] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Took 1.02 seconds to deallocate network for instance. [ 825.045153] env[61629]: DEBUG nova.compute.claims [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 825.045342] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.093757] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fee20ae-f432-4263-a1e5-abba5759b8a3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.111877] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b26ee34b-069b-4645-9933-c1dd1c2a09a5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.161065] env[61629]: DEBUG nova.compute.manager [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 825.172498] env[61629]: DEBUG oslo_concurrency.lockutils [req-d20ad6f9-6f34-46b3-9703-36cd93a4cca9 req-28e5cdcc-bf64-4292-9cee-94cca139891e service nova] Releasing lock "refresh_cache-3cabd3ef-590a-41f3-a611-3d27b4853db5" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.172802] env[61629]: DEBUG nova.compute.manager [req-d20ad6f9-6f34-46b3-9703-36cd93a4cca9 req-28e5cdcc-bf64-4292-9cee-94cca139891e service nova] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Received event network-vif-deleted-cb3c3178-0348-4ebc-9069-c2307b76759c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 825.187207] env[61629]: DEBUG nova.virt.hardware [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 825.187506] env[61629]: DEBUG nova.virt.hardware [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 825.187698] env[61629]: DEBUG nova.virt.hardware [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 825.187854] env[61629]: DEBUG nova.virt.hardware [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 825.187994] env[61629]: DEBUG nova.virt.hardware [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 825.188185] env[61629]: DEBUG nova.virt.hardware [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 825.188406] env[61629]: DEBUG nova.virt.hardware [None 
req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 825.188578] env[61629]: DEBUG nova.virt.hardware [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 825.188762] env[61629]: DEBUG nova.virt.hardware [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 825.188944] env[61629]: DEBUG nova.virt.hardware [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 825.189146] env[61629]: DEBUG nova.virt.hardware [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 825.190107] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b88d16eb-b866-4852-9b2f-0119edfe7a17 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.198917] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db946d07-34d5-42f5-86f3-ee391e3caa6e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.214929] env[61629]: ERROR nova.compute.manager [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port cd21a5cd-a27c-454a-ac3b-e137434d322b, please check neutron logs for more information. 
[ 825.214929] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Traceback (most recent call last): [ 825.214929] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 825.214929] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] yield resources [ 825.214929] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 825.214929] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] self.driver.spawn(context, instance, image_meta, [ 825.214929] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 825.214929] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 825.214929] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 825.214929] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] vm_ref = self.build_virtual_machine(instance, [ 825.214929] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 825.215487] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] vif_infos = vmwarevif.get_vif_info(self._session, [ 825.215487] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 825.215487] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] for vif in network_info: [ 825.215487] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 825.215487] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] return self._sync_wrapper(fn, *args, **kwargs) [ 825.215487] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 825.215487] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] self.wait() [ 825.215487] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 825.215487] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] self[:] = self._gt.wait() [ 825.215487] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 825.215487] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] return self._exit_event.wait() [ 825.215487] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 825.215487] env[61629]: ERROR 
nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] current.throw(*self._exc) [ 825.215977] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 825.215977] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] result = function(*args, **kwargs) [ 825.215977] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 825.215977] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] return func(*args, **kwargs) [ 825.215977] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 825.215977] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] raise e [ 825.215977] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 825.215977] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] nwinfo = self.network_api.allocate_for_instance( [ 825.215977] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 825.215977] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] created_port_ids = self._update_ports_for_instance( [ 825.215977] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 825.215977] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] with excutils.save_and_reraise_exception(): [ 825.215977] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 825.216348] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] self.force_reraise() [ 825.216348] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 825.216348] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] raise self.value [ 825.216348] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 825.216348] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] updated_port = self._update_port( [ 825.216348] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 825.216348] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] _ensure_no_port_binding_failure(port) [ 825.216348] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
825.216348] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] raise exception.PortBindingFailed(port_id=port['id']) [ 825.216348] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] nova.exception.PortBindingFailed: Binding failed for port cd21a5cd-a27c-454a-ac3b-e137434d322b, please check neutron logs for more information. [ 825.216348] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] [ 825.216348] env[61629]: INFO nova.compute.manager [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Terminating instance [ 825.217684] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "refresh_cache-c1bb3820-0c77-4a7e-bcce-17d5e6793ab9" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.217862] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquired lock "refresh_cache-c1bb3820-0c77-4a7e-bcce-17d5e6793ab9" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.218067] env[61629]: DEBUG nova.network.neutron [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 825.265497] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Releasing lock "refresh_cache-b8cfaef2-5f78-4026-90b8-fe2adacd61e0" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.265974] env[61629]: DEBUG nova.compute.manager [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 825.266234] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 825.267157] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8004914-3d00-4084-a994-62c60d1bf78a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.274898] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 825.275186] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-481102a2-b25d-4431-a3f1-7b0a40d07435 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.281136] env[61629]: DEBUG oslo_vmware.api [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Waiting for the task: (returnval){ [ 825.281136] env[61629]: value = "task-1354065" [ 825.281136] env[61629]: _type = "Task" [ 825.281136] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.289147] env[61629]: DEBUG oslo_vmware.api [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Task: {'id': task-1354065, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.524251] env[61629]: DEBUG nova.network.neutron [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 825.621441] env[61629]: DEBUG nova.compute.manager [None req-16ea19b9-1b03-4e2e-b2bb-94368d4dcb03 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Instance disappeared during snapshot {{(pid=61629) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4494}} [ 825.626479] env[61629]: DEBUG nova.network.neutron [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.735668] env[61629]: DEBUG nova.network.neutron [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 825.786024] env[61629]: DEBUG nova.compute.manager [None req-16ea19b9-1b03-4e2e-b2bb-94368d4dcb03 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Found 0 images (rotation: 2) {{(pid=61629) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 825.797121] env[61629]: DEBUG oslo_vmware.api [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Task: {'id': task-1354065, 'name': PowerOffVM_Task, 'duration_secs': 0.116809} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.799776] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 825.799971] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 825.800453] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc469a2b-37d1-4995-95e5-31a41051416f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.827112] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 825.827112] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 825.827112] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Deleting the datastore file [datastore2] b8cfaef2-5f78-4026-90b8-fe2adacd61e0 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 825.827112] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b1982719-d6ea-4c90-8381-d7e0a3a4e77b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.833405] env[61629]: DEBUG oslo_vmware.api [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Waiting for the task: (returnval){ [ 825.833405] env[61629]: value = "task-1354067" [ 825.833405] env[61629]: _type = "Task" [ 825.833405] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.850591] env[61629]: DEBUG oslo_vmware.api [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Task: {'id': task-1354067, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.854415] env[61629]: DEBUG nova.network.neutron [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.867830] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7bf7c73-feab-4a53-a78c-5f91928f8f1a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.875791] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa95e0a-5aae-4c7d-acd9-1e3350725fc2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.911978] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d900bce-3b96-4554-bf71-495bc3d2220e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.918334] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-037475c0-34f4-416b-b7c2-9f9540273af7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.933418] env[61629]: DEBUG nova.compute.provider_tree [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 826.084056] env[61629]: DEBUG nova.compute.manager [req-08945f88-c28b-4229-896a-27a4b414a4d6 req-29b05358-d319-4eed-8f6c-f71524d4f66e service nova] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Received event network-changed-cd21a5cd-a27c-454a-ac3b-e137434d322b {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 826.084274] env[61629]: DEBUG nova.compute.manager [req-08945f88-c28b-4229-896a-27a4b414a4d6 req-29b05358-d319-4eed-8f6c-f71524d4f66e service nova] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Refreshing instance network info cache due to event network-changed-cd21a5cd-a27c-454a-ac3b-e137434d322b. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 826.084462] env[61629]: DEBUG oslo_concurrency.lockutils [req-08945f88-c28b-4229-896a-27a4b414a4d6 req-29b05358-d319-4eed-8f6c-f71524d4f66e service nova] Acquiring lock "refresh_cache-c1bb3820-0c77-4a7e-bcce-17d5e6793ab9" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.131304] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Releasing lock "refresh_cache-fe6adbf6-be78-45ee-a136-b7e538fb124b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.131612] env[61629]: DEBUG nova.compute.manager [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 826.131848] env[61629]: DEBUG nova.compute.manager [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 826.132076] env[61629]: DEBUG nova.network.neutron [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 826.147670] env[61629]: DEBUG nova.network.neutron [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 826.346591] env[61629]: DEBUG oslo_vmware.api [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Task: {'id': task-1354067, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099127} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.346922] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 826.347196] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 826.347451] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 826.347660] env[61629]: INFO nova.compute.manager [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Took 1.08 seconds to destroy the instance on the hypervisor. [ 826.347968] env[61629]: DEBUG oslo.service.loopingcall [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 826.348239] env[61629]: DEBUG nova.compute.manager [-] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 826.348404] env[61629]: DEBUG nova.network.neutron [-] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 826.357622] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Releasing lock "refresh_cache-c1bb3820-0c77-4a7e-bcce-17d5e6793ab9" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.357983] env[61629]: DEBUG nova.compute.manager [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 826.358206] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 826.358481] env[61629]: DEBUG oslo_concurrency.lockutils [req-08945f88-c28b-4229-896a-27a4b414a4d6 req-29b05358-d319-4eed-8f6c-f71524d4f66e service nova] Acquired lock "refresh_cache-c1bb3820-0c77-4a7e-bcce-17d5e6793ab9" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.358638] env[61629]: DEBUG nova.network.neutron [req-08945f88-c28b-4229-896a-27a4b414a4d6 req-29b05358-d319-4eed-8f6c-f71524d4f66e service nova] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Refreshing network info cache for port cd21a5cd-a27c-454a-ac3b-e137434d322b {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 826.359659] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5d89fa44-a7c9-437d-b079-fabddfbd8c90 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.365246] env[61629]: DEBUG nova.network.neutron [-] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 826.378743] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c7f6ef-17a2-49f6-aca5-403d33f73023 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.408846] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c1bb3820-0c77-4a7e-bcce-17d5e6793ab9 could not be found. [ 826.409099] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 826.409285] env[61629]: INFO nova.compute.manager [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Took 0.05 seconds to destroy the instance on the hypervisor. [ 826.409532] env[61629]: DEBUG oslo.service.loopingcall [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 826.409756] env[61629]: DEBUG nova.compute.manager [-] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 826.409850] env[61629]: DEBUG nova.network.neutron [-] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 826.424980] env[61629]: DEBUG nova.network.neutron [-] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 826.438361] env[61629]: DEBUG nova.scheduler.client.report [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 826.650078] env[61629]: DEBUG nova.network.neutron [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.868766] env[61629]: DEBUG nova.network.neutron [-] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.881010] env[61629]: DEBUG nova.network.neutron [req-08945f88-c28b-4229-896a-27a4b414a4d6 req-29b05358-d319-4eed-8f6c-f71524d4f66e service nova] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 826.927445] env[61629]: DEBUG nova.network.neutron [-] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.942835] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.946s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.943495] env[61629]: ERROR nova.compute.manager [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7ca8eecb-5fce-4c3d-9b83-197d53c8f97d, please check neutron logs for more information. [ 826.943495] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Traceback (most recent call last): [ 826.943495] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 826.943495] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] self.driver.spawn(context, instance, image_meta, [ 826.943495] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 826.943495] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 826.943495] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 826.943495] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] vm_ref = self.build_virtual_machine(instance, [ 826.943495] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 826.943495] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] vif_infos = vmwarevif.get_vif_info(self._session, [ 826.943495] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 826.943805] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] for vif in network_info: [ 826.943805] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 826.943805] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] return self._sync_wrapper(fn, *args, **kwargs) [ 826.943805] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 826.943805] env[61629]: ERROR nova.compute.manager [instance: 
355aa564-3067-4a3c-92de-4ab6e2b8fa6b] self.wait() [ 826.943805] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 826.943805] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] self[:] = self._gt.wait() [ 826.943805] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 826.943805] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] return self._exit_event.wait() [ 826.943805] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 826.943805] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] current.throw(*self._exc) [ 826.943805] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 826.943805] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] result = function(*args, **kwargs) [ 826.944112] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 826.944112] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] return func(*args, **kwargs) [ 826.944112] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 826.944112] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] raise e [ 826.944112] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 826.944112] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] nwinfo = self.network_api.allocate_for_instance( [ 826.944112] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 826.944112] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] created_port_ids = self._update_ports_for_instance( [ 826.944112] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 826.944112] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] with excutils.save_and_reraise_exception(): [ 826.944112] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 826.944112] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] self.force_reraise() [ 826.944112] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 826.944395] env[61629]: ERROR nova.compute.manager 
[instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] raise self.value [ 826.944395] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 826.944395] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] updated_port = self._update_port( [ 826.944395] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 826.944395] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] _ensure_no_port_binding_failure(port) [ 826.944395] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 826.944395] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] raise exception.PortBindingFailed(port_id=port['id']) [ 826.944395] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] nova.exception.PortBindingFailed: Binding failed for port 7ca8eecb-5fce-4c3d-9b83-197d53c8f97d, please check neutron logs for more information. [ 826.944395] env[61629]: ERROR nova.compute.manager [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] [ 826.944395] env[61629]: DEBUG nova.compute.utils [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Binding failed for port 7ca8eecb-5fce-4c3d-9b83-197d53c8f97d, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 826.945391] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.234s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.946828] env[61629]: INFO nova.compute.claims [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 826.949675] env[61629]: DEBUG nova.compute.manager [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Build of instance 355aa564-3067-4a3c-92de-4ab6e2b8fa6b was re-scheduled: Binding failed for port 7ca8eecb-5fce-4c3d-9b83-197d53c8f97d, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 826.950065] env[61629]: DEBUG nova.compute.manager [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 826.950293] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Acquiring lock "refresh_cache-355aa564-3067-4a3c-92de-4ab6e2b8fa6b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.950440] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Acquired lock "refresh_cache-355aa564-3067-4a3c-92de-4ab6e2b8fa6b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.950595] env[61629]: DEBUG nova.network.neutron [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 826.978992] env[61629]: DEBUG nova.network.neutron [req-08945f88-c28b-4229-896a-27a4b414a4d6 req-29b05358-d319-4eed-8f6c-f71524d4f66e service nova] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.152664] env[61629]: INFO nova.compute.manager [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] [instance: fe6adbf6-be78-45ee-a136-b7e538fb124b] Took 1.02 seconds to deallocate network for instance. [ 827.373193] env[61629]: INFO nova.compute.manager [-] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Took 1.02 seconds to deallocate network for instance. [ 827.430507] env[61629]: INFO nova.compute.manager [-] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Took 1.02 seconds to deallocate network for instance. 
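Editor's note: the PortBindingFailed tracebacks above all terminate in _ensure_no_port_binding_failure (nova/network/neutron.py:294 in the quoted frames). The following is a minimal sketch of that check reconstructed only from what the traceback shows; the 'binding_failed' vif_type comparison is an assumption, since the log records just the raise.

    from nova import exception

    def _ensure_no_port_binding_failure(port):
        # The frames above show this helper raising PortBindingFailed with the
        # port id after Neutron reports a failed binding; the exact condition
        # (binding:vif_type == 'binding_failed') is assumed, not shown in the log.
        binding_vif_type = port.get('binding:vif_type')
        if binding_vif_type == 'binding_failed':
            raise exception.PortBindingFailed(port_id=port['id'])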
[ 827.432707] env[61629]: DEBUG nova.compute.claims [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Aborting claim: {{(pid=61629) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 827.432887] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.481773] env[61629]: DEBUG oslo_concurrency.lockutils [req-08945f88-c28b-4229-896a-27a4b414a4d6 req-29b05358-d319-4eed-8f6c-f71524d4f66e service nova] Releasing lock "refresh_cache-c1bb3820-0c77-4a7e-bcce-17d5e6793ab9" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.482094] env[61629]: DEBUG nova.compute.manager [req-08945f88-c28b-4229-896a-27a4b414a4d6 req-29b05358-d319-4eed-8f6c-f71524d4f66e service nova] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Received event network-vif-deleted-cd21a5cd-a27c-454a-ac3b-e137434d322b {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 827.497083] env[61629]: DEBUG nova.network.neutron [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 827.566732] env[61629]: DEBUG nova.network.neutron [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.879702] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.069174] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Releasing lock "refresh_cache-355aa564-3067-4a3c-92de-4ab6e2b8fa6b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.069489] env[61629]: DEBUG nova.compute.manager [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 828.069716] env[61629]: DEBUG nova.compute.manager [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 828.069921] env[61629]: DEBUG nova.network.neutron [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 828.084996] env[61629]: DEBUG nova.network.neutron [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 828.178527] env[61629]: INFO nova.scheduler.client.report [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Deleted allocations for instance fe6adbf6-be78-45ee-a136-b7e538fb124b [ 828.194025] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-394dfcbc-fb00-4223-8d1d-b03ed9903c5d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.201960] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-563624e8-57c6-495d-a38a-462fd7cd0fa1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.232604] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e2ddef-755f-4cf8-88f1-2791d59c73a1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.239635] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ae1a4c-e0c9-4848-bab7-06bf26ef549e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.252430] env[61629]: DEBUG nova.compute.provider_tree [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 828.587344] env[61629]: DEBUG nova.network.neutron [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.687559] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4237dc11-1048-4973-b8fc-33faad12d109 tempest-ServersTestBootFromVolume-1229395824 tempest-ServersTestBootFromVolume-1229395824-project-member] Lock 
"fe6adbf6-be78-45ee-a136-b7e538fb124b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 159.406s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.755678] env[61629]: DEBUG nova.scheduler.client.report [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 829.091029] env[61629]: INFO nova.compute.manager [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] [instance: 355aa564-3067-4a3c-92de-4ab6e2b8fa6b] Took 1.02 seconds to deallocate network for instance. [ 829.189858] env[61629]: DEBUG nova.compute.manager [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 829.261052] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.315s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.261289] env[61629]: DEBUG nova.compute.manager [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 829.263755] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.876s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.714489] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.768837] env[61629]: DEBUG nova.compute.utils [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 829.770705] env[61629]: DEBUG nova.compute.manager [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 829.770705] env[61629]: DEBUG nova.network.neutron [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 829.824618] env[61629]: DEBUG nova.policy [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b122825ec88f44ed834479f30cde698c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ef41f406d18447fbee4e7b7ae52a2d2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 830.064272] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b38590-e494-4dda-85df-ca1490a3db6f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.075095] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36fd7f69-ad49-48e7-ba1a-3cfd7f027e81 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.115319] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d8d9ca-f33b-4c5e-bc94-97d1bfb2ba4c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.123021] env[61629]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae461645-0014-4953-8948-26671062c9a5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.139268] env[61629]: DEBUG nova.compute.provider_tree [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 830.152506] env[61629]: INFO nova.scheduler.client.report [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Deleted allocations for instance 355aa564-3067-4a3c-92de-4ab6e2b8fa6b [ 830.164264] env[61629]: DEBUG nova.network.neutron [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Successfully created port: ebc79844-43d9-4ea4-b9f1-aa5a92e010e5 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 830.275718] env[61629]: DEBUG nova.compute.manager [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 830.425135] env[61629]: DEBUG nova.network.neutron [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Successfully created port: 67f6d4ea-55d3-4da2-bace-df8324128740 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 830.646054] env[61629]: DEBUG nova.scheduler.client.report [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 830.670088] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d46c1dde-eaab-4324-98bf-e847817a7908 tempest-AttachInterfacesV270Test-369526430 tempest-AttachInterfacesV270Test-369526430-project-member] Lock "355aa564-3067-4a3c-92de-4ab6e2b8fa6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 150.343s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.754973] env[61629]: DEBUG nova.network.neutron [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Successfully created port: 
bf10a052-1802-41aa-9428-2bd89d22456d {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 831.150866] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.887s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.152632] env[61629]: ERROR nova.compute.manager [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 120c8916-737e-4203-9588-a312be54933c, please check neutron logs for more information. [ 831.152632] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Traceback (most recent call last): [ 831.152632] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 831.152632] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] self.driver.spawn(context, instance, image_meta, [ 831.152632] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 831.152632] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 831.152632] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 831.152632] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] vm_ref = self.build_virtual_machine(instance, [ 831.152632] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 831.152632] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] vif_infos = vmwarevif.get_vif_info(self._session, [ 831.152632] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 831.152962] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] for vif in network_info: [ 831.152962] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 831.152962] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] return self._sync_wrapper(fn, *args, **kwargs) [ 831.152962] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 831.152962] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] self.wait() [ 831.152962] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 
831.152962] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] self[:] = self._gt.wait() [ 831.152962] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 831.152962] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] return self._exit_event.wait() [ 831.152962] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 831.152962] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] current.throw(*self._exc) [ 831.152962] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 831.152962] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] result = function(*args, **kwargs) [ 831.153298] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 831.153298] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] return func(*args, **kwargs) [ 831.153298] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 831.153298] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] raise e [ 831.153298] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 831.153298] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] nwinfo = self.network_api.allocate_for_instance( [ 831.153298] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 831.153298] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] created_port_ids = self._update_ports_for_instance( [ 831.153298] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 831.153298] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] with excutils.save_and_reraise_exception(): [ 831.153298] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 831.153298] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] self.force_reraise() [ 831.153298] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 831.157384] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] raise self.value [ 831.157384] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/network/neutron.py", line 
1389, in _update_ports_for_instance [ 831.157384] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] updated_port = self._update_port( [ 831.157384] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 831.157384] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] _ensure_no_port_binding_failure(port) [ 831.157384] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 831.157384] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] raise exception.PortBindingFailed(port_id=port['id']) [ 831.157384] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] nova.exception.PortBindingFailed: Binding failed for port 120c8916-737e-4203-9588-a312be54933c, please check neutron logs for more information. [ 831.157384] env[61629]: ERROR nova.compute.manager [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] [ 831.157384] env[61629]: DEBUG nova.compute.utils [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Binding failed for port 120c8916-737e-4203-9588-a312be54933c, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 831.157684] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.127s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.158919] env[61629]: DEBUG nova.compute.manager [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Build of instance 28af8dc5-0817-43e7-bce0-3491971efb0c was re-scheduled: Binding failed for port 120c8916-737e-4203-9588-a312be54933c, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 831.159358] env[61629]: DEBUG nova.compute.manager [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 831.159595] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Acquiring lock "refresh_cache-28af8dc5-0817-43e7-bce0-3491971efb0c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.159774] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Acquired lock "refresh_cache-28af8dc5-0817-43e7-bce0-3491971efb0c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.159937] env[61629]: DEBUG nova.network.neutron [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 831.171442] env[61629]: DEBUG nova.compute.manager [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 831.287716] env[61629]: DEBUG nova.compute.manager [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 831.325667] env[61629]: DEBUG nova.virt.hardware [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 831.325844] env[61629]: DEBUG nova.virt.hardware [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 831.326072] env[61629]: DEBUG nova.virt.hardware [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 831.326326] env[61629]: DEBUG nova.virt.hardware [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 831.326527] env[61629]: DEBUG nova.virt.hardware [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 831.326729] env[61629]: DEBUG nova.virt.hardware [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 831.327011] env[61629]: DEBUG nova.virt.hardware [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 831.327251] env[61629]: DEBUG nova.virt.hardware [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 831.327466] env[61629]: DEBUG nova.virt.hardware [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 
tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 831.327683] env[61629]: DEBUG nova.virt.hardware [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 831.327896] env[61629]: DEBUG nova.virt.hardware [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 831.328838] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-654f442b-e525-4f46-a851-b79a381f0c36 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.337179] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3742c38d-cb7d-42b7-a3ef-b2d5259095bc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.685075] env[61629]: DEBUG nova.network.neutron [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 831.694950] env[61629]: DEBUG oslo_concurrency.lockutils [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.775631] env[61629]: DEBUG nova.network.neutron [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.969342] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb81100-b361-4fe9-a34e-452b4b607a2d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.980073] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0db2981-1370-4e3d-904e-7807b3d651e7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.015365] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d0d816-2cd8-4403-aa7c-9d8df53d718a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.024033] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2f919d1b-c8cb-4236-b101-afdef368711c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.042287] env[61629]: DEBUG nova.compute.provider_tree [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 832.278813] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Releasing lock "refresh_cache-28af8dc5-0817-43e7-bce0-3491971efb0c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.279155] env[61629]: DEBUG nova.compute.manager [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 832.279357] env[61629]: DEBUG nova.compute.manager [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 832.279526] env[61629]: DEBUG nova.network.neutron [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 832.296970] env[61629]: DEBUG nova.network.neutron [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 832.471474] env[61629]: DEBUG nova.compute.manager [req-ceeba644-d335-4e9a-89d5-f67262b215ad req-4af897fd-5d45-4c7e-82e4-595546f21fcf service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Received event network-vif-plugged-ebc79844-43d9-4ea4-b9f1-aa5a92e010e5 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 832.471700] env[61629]: DEBUG oslo_concurrency.lockutils [req-ceeba644-d335-4e9a-89d5-f67262b215ad req-4af897fd-5d45-4c7e-82e4-595546f21fcf service nova] Acquiring lock "dce0c7e1-1e47-49ad-88f7-f8f5e293d239-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.471938] env[61629]: DEBUG oslo_concurrency.lockutils [req-ceeba644-d335-4e9a-89d5-f67262b215ad req-4af897fd-5d45-4c7e-82e4-595546f21fcf service nova] Lock "dce0c7e1-1e47-49ad-88f7-f8f5e293d239-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.472084] env[61629]: DEBUG oslo_concurrency.lockutils [req-ceeba644-d335-4e9a-89d5-f67262b215ad req-4af897fd-5d45-4c7e-82e4-595546f21fcf service nova] Lock "dce0c7e1-1e47-49ad-88f7-f8f5e293d239-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.472256] env[61629]: DEBUG nova.compute.manager [req-ceeba644-d335-4e9a-89d5-f67262b215ad req-4af897fd-5d45-4c7e-82e4-595546f21fcf service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] No waiting events found dispatching network-vif-plugged-ebc79844-43d9-4ea4-b9f1-aa5a92e010e5 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 832.472422] env[61629]: WARNING nova.compute.manager [req-ceeba644-d335-4e9a-89d5-f67262b215ad req-4af897fd-5d45-4c7e-82e4-595546f21fcf service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Received unexpected event network-vif-plugged-ebc79844-43d9-4ea4-b9f1-aa5a92e010e5 for instance with vm_state building and task_state spawning. 
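Editor's note: the Acquiring / "acquired ... waited Ns" / '"released" ... held Ns' lines throughout this log (for "compute_resources", the per-instance "-events" locks, and the "refresh_cache-<uuid>" locks) are emitted by oslo.concurrency's lockutils wrappers ("inner" and "lock" in lockutils.py). A small illustrative sketch of the same pattern; the lock names and function below are examples, not taken from Nova source.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # While this runs, other callers synchronized on the same name block;
        # lockutils logs the acquired/waited and released/held DEBUG lines
        # seen in the entries above.
        pass

    # Context-manager form, as used for the refresh_cache-<uuid> style locks:
    with lockutils.lock('refresh_cache-example-uuid'):
        pass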
[ 832.548021] env[61629]: DEBUG nova.scheduler.client.report [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 832.611297] env[61629]: DEBUG nova.network.neutron [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Successfully updated port: ebc79844-43d9-4ea4-b9f1-aa5a92e010e5 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 832.800168] env[61629]: DEBUG nova.network.neutron [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.054126] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.896s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.054126] env[61629]: ERROR nova.compute.manager [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bc671ae6-2cb4-46b5-8289-516f2007bc6b, please check neutron logs for more information. 
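Editor's note: for the provider inventory repeated in the scheduler report-client entries above, usable capacity per resource class in placement is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A worked sketch using the values logged for provider d075eff1-6f77-44a8-824e-16f3e03b4063 (the dict is copied from the log; only the fields needed for the arithmetic are kept):

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'max_unit': 16, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'max_unit': 151, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        # VCPU: (48 - 0) * 4.0 = 192, MEMORY_MB: (196590 - 512) * 1.0 = 196078,
        # DISK_GB: (400 - 0) * 1.0 = 400; no single allocation may exceed max_unit.
        print(rc, capacity, 'max per allocation:', inv['max_unit'])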
[ 833.054126] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Traceback (most recent call last): [ 833.054126] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 833.054126] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] self.driver.spawn(context, instance, image_meta, [ 833.054126] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 833.054126] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 833.054126] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 833.054126] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] vm_ref = self.build_virtual_machine(instance, [ 833.054416] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 833.054416] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] vif_infos = vmwarevif.get_vif_info(self._session, [ 833.054416] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 833.054416] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] for vif in network_info: [ 833.054416] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 833.054416] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] return self._sync_wrapper(fn, *args, **kwargs) [ 833.054416] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 833.054416] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] self.wait() [ 833.054416] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 833.054416] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] self[:] = self._gt.wait() [ 833.054416] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 833.054416] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] return self._exit_event.wait() [ 833.054416] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 833.054763] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] current.throw(*self._exc) [ 833.054763] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
833.054763] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] result = function(*args, **kwargs) [ 833.054763] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 833.054763] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] return func(*args, **kwargs) [ 833.054763] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 833.054763] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] raise e [ 833.054763] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 833.054763] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] nwinfo = self.network_api.allocate_for_instance( [ 833.054763] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 833.054763] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] created_port_ids = self._update_ports_for_instance( [ 833.054763] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 833.054763] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] with excutils.save_and_reraise_exception(): [ 833.055152] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 833.055152] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] self.force_reraise() [ 833.055152] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 833.055152] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] raise self.value [ 833.055152] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 833.055152] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] updated_port = self._update_port( [ 833.055152] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 833.055152] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] _ensure_no_port_binding_failure(port) [ 833.055152] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 833.055152] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] raise exception.PortBindingFailed(port_id=port['id']) [ 833.055152] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] nova.exception.PortBindingFailed: Binding failed for 
port bc671ae6-2cb4-46b5-8289-516f2007bc6b, please check neutron logs for more information. [ 833.055152] env[61629]: ERROR nova.compute.manager [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] [ 833.055464] env[61629]: DEBUG nova.compute.utils [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Binding failed for port bc671ae6-2cb4-46b5-8289-516f2007bc6b, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 833.055464] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.696s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.057335] env[61629]: INFO nova.compute.claims [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 833.065895] env[61629]: DEBUG nova.compute.manager [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Build of instance c5c6854c-1fe6-46e7-aee7-6a5e00d6027c was re-scheduled: Binding failed for port bc671ae6-2cb4-46b5-8289-516f2007bc6b, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 833.065895] env[61629]: DEBUG nova.compute.manager [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 833.065895] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "refresh_cache-c5c6854c-1fe6-46e7-aee7-6a5e00d6027c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.065895] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquired lock "refresh_cache-c5c6854c-1fe6-46e7-aee7-6a5e00d6027c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.065895] env[61629]: DEBUG nova.network.neutron [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 833.303243] env[61629]: INFO nova.compute.manager [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] [instance: 28af8dc5-0817-43e7-bce0-3491971efb0c] Took 1.02 seconds to deallocate network for instance. [ 833.587762] env[61629]: DEBUG nova.network.neutron [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 833.843345] env[61629]: DEBUG nova.network.neutron [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.336565] env[61629]: INFO nova.scheduler.client.report [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Deleted allocations for instance 28af8dc5-0817-43e7-bce0-3491971efb0c [ 834.346622] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Releasing lock "refresh_cache-c5c6854c-1fe6-46e7-aee7-6a5e00d6027c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.347343] env[61629]: DEBUG nova.compute.manager [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 834.347613] env[61629]: DEBUG nova.compute.manager [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 834.347902] env[61629]: DEBUG nova.network.neutron [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 834.367623] env[61629]: DEBUG nova.network.neutron [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 834.404382] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8237587f-55b9-4a0e-b5cc-40b7c7a1bb2d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.413331] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf0db0d-0deb-4f09-b93e-55cd79c1a1c2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.445966] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41233b4f-a2d9-414b-95b2-3ce54d51fbfa {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.453820] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3459ecda-14e7-490e-aadf-6dba5f4f54e7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.466981] env[61629]: DEBUG nova.compute.provider_tree [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 834.495480] env[61629]: DEBUG nova.compute.manager [req-7054fa2e-c697-4275-a88d-04a038093cd1 req-71eeaef1-5261-4b18-964b-3d46bdc041cd service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Received event network-changed-ebc79844-43d9-4ea4-b9f1-aa5a92e010e5 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 834.495676] env[61629]: DEBUG nova.compute.manager [req-7054fa2e-c697-4275-a88d-04a038093cd1 req-71eeaef1-5261-4b18-964b-3d46bdc041cd service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Refreshing instance network info cache due to event network-changed-ebc79844-43d9-4ea4-b9f1-aa5a92e010e5. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 834.495893] env[61629]: DEBUG oslo_concurrency.lockutils [req-7054fa2e-c697-4275-a88d-04a038093cd1 req-71eeaef1-5261-4b18-964b-3d46bdc041cd service nova] Acquiring lock "refresh_cache-dce0c7e1-1e47-49ad-88f7-f8f5e293d239" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.496048] env[61629]: DEBUG oslo_concurrency.lockutils [req-7054fa2e-c697-4275-a88d-04a038093cd1 req-71eeaef1-5261-4b18-964b-3d46bdc041cd service nova] Acquired lock "refresh_cache-dce0c7e1-1e47-49ad-88f7-f8f5e293d239" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.496214] env[61629]: DEBUG nova.network.neutron [req-7054fa2e-c697-4275-a88d-04a038093cd1 req-71eeaef1-5261-4b18-964b-3d46bdc041cd service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Refreshing network info cache for port ebc79844-43d9-4ea4-b9f1-aa5a92e010e5 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 834.808992] env[61629]: DEBUG nova.network.neutron [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Successfully updated port: 67f6d4ea-55d3-4da2-bace-df8324128740 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 834.852108] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ea7e88cd-e10d-4ff2-b79e-c0d40bc1da25 tempest-ServerActionsTestOtherB-381949915 tempest-ServerActionsTestOtherB-381949915-project-member] Lock "28af8dc5-0817-43e7-bce0-3491971efb0c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 149.516s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.870630] env[61629]: DEBUG nova.network.neutron [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.970266] env[61629]: DEBUG nova.scheduler.client.report [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 835.052233] env[61629]: DEBUG nova.network.neutron [req-7054fa2e-c697-4275-a88d-04a038093cd1 req-71eeaef1-5261-4b18-964b-3d46bdc041cd service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 835.156818] env[61629]: DEBUG nova.network.neutron [req-7054fa2e-c697-4275-a88d-04a038093cd1 req-71eeaef1-5261-4b18-964b-3d46bdc041cd service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.355171] env[61629]: DEBUG nova.compute.manager [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 835.376026] env[61629]: INFO nova.compute.manager [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: c5c6854c-1fe6-46e7-aee7-6a5e00d6027c] Took 1.03 seconds to deallocate network for instance. [ 835.474976] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.421s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.478111] env[61629]: DEBUG nova.compute.manager [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 835.480017] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.182s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.660352] env[61629]: DEBUG oslo_concurrency.lockutils [req-7054fa2e-c697-4275-a88d-04a038093cd1 req-71eeaef1-5261-4b18-964b-3d46bdc041cd service nova] Releasing lock "refresh_cache-dce0c7e1-1e47-49ad-88f7-f8f5e293d239" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.742467] env[61629]: DEBUG oslo_concurrency.lockutils [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Acquiring lock "12c6b03b-8295-43de-898f-a6c35f1693b7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.742709] env[61629]: DEBUG oslo_concurrency.lockutils [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Lock "12c6b03b-8295-43de-898f-a6c35f1693b7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.890821] env[61629]: DEBUG oslo_concurrency.lockutils [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.981490] env[61629]: DEBUG nova.compute.utils [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 835.983940] env[61629]: DEBUG nova.compute.manager [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 835.983940] env[61629]: DEBUG nova.network.neutron [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 836.055291] env[61629]: DEBUG nova.policy [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2bdcaacaf2034ff994ee2e8b0e5071b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cc5fe81fb0eb4820825cc8e97b8fe4f2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 836.296932] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1e591c3-0eb5-483d-aa67-a8627999ec9b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.305168] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9693a218-4fe2-4c36-9581-90c0585436df {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.335413] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b75f121-4fe5-4235-b4fa-249e0a48d343 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.343027] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1735fc-7564-490d-916e-90eebdcf494c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.357917] env[61629]: DEBUG nova.compute.provider_tree [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.424171] env[61629]: INFO nova.scheduler.client.report [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Deleted allocations for instance c5c6854c-1fe6-46e7-aee7-6a5e00d6027c [ 836.432162] env[61629]: DEBUG nova.network.neutron [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Successfully created port: b8a895f7-ad9d-4d49-8460-de82459d88f7 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 836.489017] env[61629]: DEBUG nova.compute.manager [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 
fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 836.614446] env[61629]: DEBUG nova.compute.manager [req-40cea0a7-3e1b-41a3-8343-6b55e3e963fc req-35cefa34-735f-4982-9067-fea1ac76ecc0 service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Received event network-vif-plugged-67f6d4ea-55d3-4da2-bace-df8324128740 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 836.614645] env[61629]: DEBUG oslo_concurrency.lockutils [req-40cea0a7-3e1b-41a3-8343-6b55e3e963fc req-35cefa34-735f-4982-9067-fea1ac76ecc0 service nova] Acquiring lock "dce0c7e1-1e47-49ad-88f7-f8f5e293d239-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.614807] env[61629]: DEBUG oslo_concurrency.lockutils [req-40cea0a7-3e1b-41a3-8343-6b55e3e963fc req-35cefa34-735f-4982-9067-fea1ac76ecc0 service nova] Lock "dce0c7e1-1e47-49ad-88f7-f8f5e293d239-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.615011] env[61629]: DEBUG oslo_concurrency.lockutils [req-40cea0a7-3e1b-41a3-8343-6b55e3e963fc req-35cefa34-735f-4982-9067-fea1ac76ecc0 service nova] Lock "dce0c7e1-1e47-49ad-88f7-f8f5e293d239-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.615209] env[61629]: DEBUG nova.compute.manager [req-40cea0a7-3e1b-41a3-8343-6b55e3e963fc req-35cefa34-735f-4982-9067-fea1ac76ecc0 service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] No waiting events found dispatching network-vif-plugged-67f6d4ea-55d3-4da2-bace-df8324128740 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 836.615389] env[61629]: WARNING nova.compute.manager [req-40cea0a7-3e1b-41a3-8343-6b55e3e963fc req-35cefa34-735f-4982-9067-fea1ac76ecc0 service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Received unexpected event network-vif-plugged-67f6d4ea-55d3-4da2-bace-df8324128740 for instance with vm_state building and task_state spawning. [ 836.615550] env[61629]: DEBUG nova.compute.manager [req-40cea0a7-3e1b-41a3-8343-6b55e3e963fc req-35cefa34-735f-4982-9067-fea1ac76ecc0 service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Received event network-changed-67f6d4ea-55d3-4da2-bace-df8324128740 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 836.616122] env[61629]: DEBUG nova.compute.manager [req-40cea0a7-3e1b-41a3-8343-6b55e3e963fc req-35cefa34-735f-4982-9067-fea1ac76ecc0 service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Refreshing instance network info cache due to event network-changed-67f6d4ea-55d3-4da2-bace-df8324128740. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 836.616597] env[61629]: DEBUG oslo_concurrency.lockutils [req-40cea0a7-3e1b-41a3-8343-6b55e3e963fc req-35cefa34-735f-4982-9067-fea1ac76ecc0 service nova] Acquiring lock "refresh_cache-dce0c7e1-1e47-49ad-88f7-f8f5e293d239" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.616597] env[61629]: DEBUG oslo_concurrency.lockutils [req-40cea0a7-3e1b-41a3-8343-6b55e3e963fc req-35cefa34-735f-4982-9067-fea1ac76ecc0 service nova] Acquired lock "refresh_cache-dce0c7e1-1e47-49ad-88f7-f8f5e293d239" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.616745] env[61629]: DEBUG nova.network.neutron [req-40cea0a7-3e1b-41a3-8343-6b55e3e963fc req-35cefa34-735f-4982-9067-fea1ac76ecc0 service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Refreshing network info cache for port 67f6d4ea-55d3-4da2-bace-df8324128740 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 836.861104] env[61629]: DEBUG nova.scheduler.client.report [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 836.936849] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a114d77e-d752-44ef-b076-4d8b1b071773 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "c5c6854c-1fe6-46e7-aee7-6a5e00d6027c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 132.471s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.144416] env[61629]: DEBUG nova.network.neutron [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Successfully updated port: bf10a052-1802-41aa-9428-2bd89d22456d {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 837.179217] env[61629]: DEBUG nova.network.neutron [req-40cea0a7-3e1b-41a3-8343-6b55e3e963fc req-35cefa34-735f-4982-9067-fea1ac76ecc0 service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 837.371027] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.888s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.371027] env[61629]: ERROR nova.compute.manager [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4f397c05-0f7d-4b83-a52e-ffc278afadf8, please check neutron logs for more information. [ 837.371027] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Traceback (most recent call last): [ 837.371027] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 837.371027] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] self.driver.spawn(context, instance, image_meta, [ 837.371027] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 837.371027] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 837.371027] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 837.371027] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] vm_ref = self.build_virtual_machine(instance, [ 837.371397] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 837.371397] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] vif_infos = vmwarevif.get_vif_info(self._session, [ 837.371397] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 837.371397] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] for vif in network_info: [ 837.371397] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 837.371397] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] return self._sync_wrapper(fn, *args, **kwargs) [ 837.371397] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 837.371397] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] self.wait() [ 837.371397] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 837.371397] env[61629]: ERROR 
nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] self[:] = self._gt.wait() [ 837.371397] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 837.371397] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] return self._exit_event.wait() [ 837.371397] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 837.371735] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] current.throw(*self._exc) [ 837.371735] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 837.371735] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] result = function(*args, **kwargs) [ 837.371735] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 837.371735] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] return func(*args, **kwargs) [ 837.371735] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 837.371735] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] raise e [ 837.371735] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 837.371735] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] nwinfo = self.network_api.allocate_for_instance( [ 837.371735] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 837.371735] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] created_port_ids = self._update_ports_for_instance( [ 837.371735] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 837.371735] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] with excutils.save_and_reraise_exception(): [ 837.372048] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 837.372048] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] self.force_reraise() [ 837.372048] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 837.372048] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] raise self.value [ 837.372048] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in 
_update_ports_for_instance [ 837.372048] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] updated_port = self._update_port( [ 837.372048] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 837.372048] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] _ensure_no_port_binding_failure(port) [ 837.372048] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 837.372048] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] raise exception.PortBindingFailed(port_id=port['id']) [ 837.372048] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] nova.exception.PortBindingFailed: Binding failed for port 4f397c05-0f7d-4b83-a52e-ffc278afadf8, please check neutron logs for more information. [ 837.372048] env[61629]: ERROR nova.compute.manager [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] [ 837.372318] env[61629]: DEBUG nova.compute.utils [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Binding failed for port 4f397c05-0f7d-4b83-a52e-ffc278afadf8, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 837.372318] env[61629]: DEBUG oslo_concurrency.lockutils [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.472s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.373942] env[61629]: INFO nova.compute.claims [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 837.377529] env[61629]: DEBUG nova.compute.manager [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Build of instance 76f08ac6-bb83-4d61-9707-b602028c54f2 was re-scheduled: Binding failed for port 4f397c05-0f7d-4b83-a52e-ffc278afadf8, please check neutron logs for more information. 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 837.378157] env[61629]: DEBUG nova.compute.manager [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 837.379851] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "refresh_cache-76f08ac6-bb83-4d61-9707-b602028c54f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.379851] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired lock "refresh_cache-76f08ac6-bb83-4d61-9707-b602028c54f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.379851] env[61629]: DEBUG nova.network.neutron [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 837.441858] env[61629]: DEBUG nova.compute.manager [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 837.500951] env[61629]: DEBUG nova.compute.manager [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 837.523229] env[61629]: DEBUG nova.virt.hardware [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 837.523716] env[61629]: DEBUG nova.virt.hardware [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 837.524206] env[61629]: DEBUG nova.virt.hardware [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 837.524527] env[61629]: DEBUG nova.virt.hardware [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 837.524822] env[61629]: DEBUG nova.virt.hardware [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 837.527856] env[61629]: DEBUG nova.virt.hardware [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 837.527856] env[61629]: DEBUG nova.virt.hardware [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 837.527856] env[61629]: DEBUG nova.virt.hardware [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 837.527856] env[61629]: DEBUG nova.virt.hardware [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 837.527856] env[61629]: DEBUG nova.virt.hardware [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 837.528131] env[61629]: DEBUG nova.virt.hardware [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 837.528131] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afae4377-e5d0-41e8-b954-681b8b7e3fa7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.531134] env[61629]: DEBUG nova.network.neutron [req-40cea0a7-3e1b-41a3-8343-6b55e3e963fc req-35cefa34-735f-4982-9067-fea1ac76ecc0 service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.537692] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-929d2fb6-9190-42c6-96cd-b8b58f3f95ca {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.649582] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquiring lock "refresh_cache-dce0c7e1-1e47-49ad-88f7-f8f5e293d239" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.910229] env[61629]: DEBUG nova.network.neutron [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 837.960830] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.007650] env[61629]: DEBUG nova.network.neutron [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.035911] env[61629]: DEBUG oslo_concurrency.lockutils [req-40cea0a7-3e1b-41a3-8343-6b55e3e963fc req-35cefa34-735f-4982-9067-fea1ac76ecc0 service nova] Releasing lock "refresh_cache-dce0c7e1-1e47-49ad-88f7-f8f5e293d239" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.035911] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquired lock "refresh_cache-dce0c7e1-1e47-49ad-88f7-f8f5e293d239" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.035911] env[61629]: DEBUG nova.network.neutron [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 838.171886] env[61629]: DEBUG nova.network.neutron [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Successfully updated port: b8a895f7-ad9d-4d49-8460-de82459d88f7 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 838.259604] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "1d451558-dbbc-4942-b739-5d4b88057a75" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.259815] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "1d451558-dbbc-4942-b739-5d4b88057a75" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.509355] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Releasing lock "refresh_cache-76f08ac6-bb83-4d61-9707-b602028c54f2" {{(pid=61629) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.509587] env[61629]: DEBUG nova.compute.manager [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 838.509795] env[61629]: DEBUG nova.compute.manager [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 838.509937] env[61629]: DEBUG nova.network.neutron [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 838.529886] env[61629]: DEBUG nova.network.neutron [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 838.586110] env[61629]: DEBUG nova.network.neutron [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 838.615833] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc4784fb-2d2e-44e3-be44-2706d37e491e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.624808] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0acc54-ff9f-4918-9cb7-f498aa8df138 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.660626] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e59279f2-ca75-46e5-afcc-406a9375a088 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.668418] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f0c1000-14f7-44b9-b866-5fec6702f1b1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.675442] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquiring lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.675645] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquired lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.675793] env[61629]: DEBUG nova.network.neutron [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 838.691410] env[61629]: DEBUG nova.compute.provider_tree [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.703218] env[61629]: DEBUG nova.compute.manager [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Received event network-vif-plugged-bf10a052-1802-41aa-9428-2bd89d22456d {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 838.703430] env[61629]: DEBUG oslo_concurrency.lockutils [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] Acquiring lock "dce0c7e1-1e47-49ad-88f7-f8f5e293d239-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.703726] env[61629]: DEBUG oslo_concurrency.lockutils 
[req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] Lock "dce0c7e1-1e47-49ad-88f7-f8f5e293d239-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.703811] env[61629]: DEBUG oslo_concurrency.lockutils [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] Lock "dce0c7e1-1e47-49ad-88f7-f8f5e293d239-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.703976] env[61629]: DEBUG nova.compute.manager [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] No waiting events found dispatching network-vif-plugged-bf10a052-1802-41aa-9428-2bd89d22456d {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 838.704270] env[61629]: WARNING nova.compute.manager [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Received unexpected event network-vif-plugged-bf10a052-1802-41aa-9428-2bd89d22456d for instance with vm_state building and task_state spawning. [ 838.704459] env[61629]: DEBUG nova.compute.manager [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Received event network-changed-bf10a052-1802-41aa-9428-2bd89d22456d {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 838.704629] env[61629]: DEBUG nova.compute.manager [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Refreshing instance network info cache due to event network-changed-bf10a052-1802-41aa-9428-2bd89d22456d. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 838.704806] env[61629]: DEBUG oslo_concurrency.lockutils [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] Acquiring lock "refresh_cache-dce0c7e1-1e47-49ad-88f7-f8f5e293d239" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.032290] env[61629]: DEBUG nova.network.neutron [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.200219] env[61629]: DEBUG nova.scheduler.client.report [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 839.242533] env[61629]: DEBUG nova.network.neutron [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 839.267386] env[61629]: DEBUG nova.network.neutron [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Updating instance_info_cache with network_info: [{"id": "ebc79844-43d9-4ea4-b9f1-aa5a92e010e5", "address": "fa:16:3e:0b:50:a0", "network": {"id": "cf4addbb-542d-4e7b-ae32-7fcb17418905", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-635911799", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ef41f406d18447fbee4e7b7ae52a2d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebc79844-43", "ovs_interfaceid": "ebc79844-43d9-4ea4-b9f1-aa5a92e010e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "67f6d4ea-55d3-4da2-bace-df8324128740", "address": "fa:16:3e:f4:9e:9f", "network": {"id": "b1811bc5-9521-45e7-a7ed-96e38c7f465d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-638624967", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.27", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "4ef41f406d18447fbee4e7b7ae52a2d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69076131-87ac-46dd-9d5d-8d1b4ea7dec6", "external-id": "nsx-vlan-transportzone-327", "segmentation_id": 327, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67f6d4ea-55", "ovs_interfaceid": "67f6d4ea-55d3-4da2-bace-df8324128740", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bf10a052-1802-41aa-9428-2bd89d22456d", "address": "fa:16:3e:a4:d2:77", "network": {"id": "cf4addbb-542d-4e7b-ae32-7fcb17418905", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-635911799", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.190", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ef41f406d18447fbee4e7b7ae52a2d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": 
"nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf10a052-18", "ovs_interfaceid": "bf10a052-1802-41aa-9428-2bd89d22456d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.506773] env[61629]: DEBUG nova.network.neutron [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Updating instance_info_cache with network_info: [{"id": "b8a895f7-ad9d-4d49-8460-de82459d88f7", "address": "fa:16:3e:7d:7e:9a", "network": {"id": "7ab21805-1836-4ac0-94d2-d715f9f3352e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1256584900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc5fe81fb0eb4820825cc8e97b8fe4f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8a895f7-ad", "ovs_interfaceid": "b8a895f7-ad9d-4d49-8460-de82459d88f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.535084] env[61629]: INFO nova.compute.manager [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 76f08ac6-bb83-4d61-9707-b602028c54f2] Took 1.02 seconds to deallocate network for instance. [ 839.703112] env[61629]: DEBUG oslo_concurrency.lockutils [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.332s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.704028] env[61629]: DEBUG nova.compute.manager [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 839.706388] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.661s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.770222] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Releasing lock "refresh_cache-dce0c7e1-1e47-49ad-88f7-f8f5e293d239" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.770635] env[61629]: DEBUG nova.compute.manager [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Instance network_info: |[{"id": "ebc79844-43d9-4ea4-b9f1-aa5a92e010e5", "address": "fa:16:3e:0b:50:a0", "network": {"id": "cf4addbb-542d-4e7b-ae32-7fcb17418905", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-635911799", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ef41f406d18447fbee4e7b7ae52a2d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebc79844-43", "ovs_interfaceid": "ebc79844-43d9-4ea4-b9f1-aa5a92e010e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "67f6d4ea-55d3-4da2-bace-df8324128740", "address": "fa:16:3e:f4:9e:9f", "network": {"id": "b1811bc5-9521-45e7-a7ed-96e38c7f465d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-638624967", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.27", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "4ef41f406d18447fbee4e7b7ae52a2d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69076131-87ac-46dd-9d5d-8d1b4ea7dec6", "external-id": "nsx-vlan-transportzone-327", "segmentation_id": 327, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67f6d4ea-55", "ovs_interfaceid": "67f6d4ea-55d3-4da2-bace-df8324128740", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bf10a052-1802-41aa-9428-2bd89d22456d", "address": 
"fa:16:3e:a4:d2:77", "network": {"id": "cf4addbb-542d-4e7b-ae32-7fcb17418905", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-635911799", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.190", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ef41f406d18447fbee4e7b7ae52a2d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf10a052-18", "ovs_interfaceid": "bf10a052-1802-41aa-9428-2bd89d22456d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 839.770937] env[61629]: DEBUG oslo_concurrency.lockutils [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] Acquired lock "refresh_cache-dce0c7e1-1e47-49ad-88f7-f8f5e293d239" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.771134] env[61629]: DEBUG nova.network.neutron [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Refreshing network info cache for port bf10a052-1802-41aa-9428-2bd89d22456d {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 839.772393] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:50:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d2742ba-c3af-4412-877d-c2811dfeba46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ebc79844-43d9-4ea4-b9f1-aa5a92e010e5', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:9e:9f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69076131-87ac-46dd-9d5d-8d1b4ea7dec6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67f6d4ea-55d3-4da2-bace-df8324128740', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:d2:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d2742ba-c3af-4412-877d-c2811dfeba46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bf10a052-1802-41aa-9428-2bd89d22456d', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 839.786702] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Creating folder: Project (4ef41f406d18447fbee4e7b7ae52a2d2). Parent ref: group-v288443. 
{{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 839.787997] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c72c8e65-faf5-4ab1-b5cf-4d2f97071ddd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.799852] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Created folder: Project (4ef41f406d18447fbee4e7b7ae52a2d2) in parent group-v288443. [ 839.800053] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Creating folder: Instances. Parent ref: group-v288472. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 839.800289] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-56e3a686-fcc8-4b97-ae22-59adea87db55 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.810517] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Created folder: Instances in parent group-v288472. [ 839.810785] env[61629]: DEBUG oslo.service.loopingcall [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 839.810980] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 839.811552] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-77ec136e-f124-4772-b9fd-b743131da6e4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.837175] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 839.837175] env[61629]: value = "task-1354071" [ 839.837175] env[61629]: _type = "Task" [ 839.837175] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.845525] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354071, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.009293] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Releasing lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.009654] env[61629]: DEBUG nova.compute.manager [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Instance network_info: |[{"id": "b8a895f7-ad9d-4d49-8460-de82459d88f7", "address": "fa:16:3e:7d:7e:9a", "network": {"id": "7ab21805-1836-4ac0-94d2-d715f9f3352e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1256584900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc5fe81fb0eb4820825cc8e97b8fe4f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8a895f7-ad", "ovs_interfaceid": "b8a895f7-ad9d-4d49-8460-de82459d88f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 840.010250] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:7e:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b8a895f7-ad9d-4d49-8460-de82459d88f7', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 840.018271] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Creating folder: Project (cc5fe81fb0eb4820825cc8e97b8fe4f2). Parent ref: group-v288443. 
{{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 840.018569] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-724c685c-9824-4582-af21-0d106b557bdc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.033652] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Created folder: Project (cc5fe81fb0eb4820825cc8e97b8fe4f2) in parent group-v288443. [ 840.033900] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Creating folder: Instances. Parent ref: group-v288475. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 840.034109] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-99695733-cceb-4fa2-a397-41a8672c2e0f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.050026] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Created folder: Instances in parent group-v288475. [ 840.050026] env[61629]: DEBUG oslo.service.loopingcall [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 840.050026] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 840.050026] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-34a6ec21-2957-4498-a8cd-26a1ad45503b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.078162] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 840.078162] env[61629]: value = "task-1354074" [ 840.078162] env[61629]: _type = "Task" [ 840.078162] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.086516] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354074, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.211464] env[61629]: DEBUG nova.compute.utils [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 840.217104] env[61629]: DEBUG nova.compute.manager [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Allocating IP information in the background. 
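
The folder-creation entries above follow a two-step layout: a per-project folder ("Project (<project_id>)") under the OpenStack root folder, then an "Instances" folder beneath it. A minimal sketch of that ordering; create_folder here is a hypothetical stand-in for the Folder.CreateFolder call invoked above and is assumed to return the new folder reference (e.g. group-v288472) and to tolerate an already-existing folder:

def ensure_instance_folder(create_folder, root_ref, project_id):
    """Create (or reuse) the per-project folder, then its Instances subfolder."""
    project_folder = create_folder(root_ref, "Project (%s)" % project_id)
    return create_folder(project_folder, "Instances")
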
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 840.217104] env[61629]: DEBUG nova.network.neutron [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 840.299517] env[61629]: DEBUG nova.policy [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2b65334bace24147a574491dc4fa889c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f4c865d2dd4d4c9eba96f315e09a5e6b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 840.356088] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354071, 'name': CreateVM_Task, 'duration_secs': 0.471126} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.356310] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 840.385552] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.385767] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.386235] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 840.386693] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc4bff78-04d0-4397-84d5-2d489a999f83 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.396282] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for the task: (returnval){ [ 840.396282] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5263297b-64f2-ae34-7cc2-ab5df05abdad" [ 840.396282] env[61629]: _type = "Task" [ 
840.396282] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.413352] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5263297b-64f2-ae34-7cc2-ab5df05abdad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.566516] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a581ea7d-f167-48ca-bdfb-392bb1a2a4c3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.573941] env[61629]: DEBUG nova.network.neutron [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Updated VIF entry in instance network info cache for port bf10a052-1802-41aa-9428-2bd89d22456d. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 840.578262] env[61629]: DEBUG nova.network.neutron [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Updating instance_info_cache with network_info: [{"id": "ebc79844-43d9-4ea4-b9f1-aa5a92e010e5", "address": "fa:16:3e:0b:50:a0", "network": {"id": "cf4addbb-542d-4e7b-ae32-7fcb17418905", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-635911799", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ef41f406d18447fbee4e7b7ae52a2d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebc79844-43", "ovs_interfaceid": "ebc79844-43d9-4ea4-b9f1-aa5a92e010e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "67f6d4ea-55d3-4da2-bace-df8324128740", "address": "fa:16:3e:f4:9e:9f", "network": {"id": "b1811bc5-9521-45e7-a7ed-96e38c7f465d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-638624967", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.27", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "4ef41f406d18447fbee4e7b7ae52a2d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69076131-87ac-46dd-9d5d-8d1b4ea7dec6", "external-id": "nsx-vlan-transportzone-327", "segmentation_id": 327, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap67f6d4ea-55", "ovs_interfaceid": "67f6d4ea-55d3-4da2-bace-df8324128740", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bf10a052-1802-41aa-9428-2bd89d22456d", "address": "fa:16:3e:a4:d2:77", "network": {"id": "cf4addbb-542d-4e7b-ae32-7fcb17418905", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-635911799", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.190", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ef41f406d18447fbee4e7b7ae52a2d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf10a052-18", "ovs_interfaceid": "bf10a052-1802-41aa-9428-2bd89d22456d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.583177] env[61629]: INFO nova.scheduler.client.report [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Deleted allocations for instance 76f08ac6-bb83-4d61-9707-b602028c54f2 [ 840.593059] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb8ec0bb-c6cb-42eb-932b-86033603abd7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.631398] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354074, 'name': CreateVM_Task, 'duration_secs': 0.321504} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.632255] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff8ebc3-e7b6-434e-a52e-520f8e1f7210 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.634821] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 840.636734] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.642607] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b95f90-e9b8-47dc-9aca-f651762a7293 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.660373] env[61629]: DEBUG nova.compute.provider_tree [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 840.725976] env[61629]: DEBUG nova.compute.manager [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 840.908436] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5263297b-64f2-ae34-7cc2-ab5df05abdad, 'name': SearchDatastore_Task, 'duration_secs': 0.013971} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.908782] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.909444] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 840.909444] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.909444] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.909677] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 840.909890] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.910221] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 840.910446] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6e25879b-95c1-404a-84dc-0115cee1c5e6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.912583] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d51bc9f-d540-4c91-888d-9d7ecde7d9f3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.919456] env[61629]: DEBUG oslo_vmware.api [None 
req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 840.919456] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52db5e36-c0e3-7560-574a-0d1128663e7e" [ 840.919456] env[61629]: _type = "Task" [ 840.919456] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.923480] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 840.923669] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 840.927880] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88a673fb-e2ff-4b56-ae37-201cdb0e7bb4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.930839] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52db5e36-c0e3-7560-574a-0d1128663e7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.934349] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for the task: (returnval){ [ 840.934349] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52b945b0-f554-f55f-7aa1-f63015c095b4" [ 840.934349] env[61629]: _type = "Task" [ 840.934349] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.946192] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52b945b0-f554-f55f-7aa1-f63015c095b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.077743] env[61629]: DEBUG oslo_concurrency.lockutils [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] Releasing lock "refresh_cache-dce0c7e1-1e47-49ad-88f7-f8f5e293d239" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.078088] env[61629]: DEBUG nova.compute.manager [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Received event network-vif-plugged-b8a895f7-ad9d-4d49-8460-de82459d88f7 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 841.078580] env[61629]: DEBUG oslo_concurrency.lockutils [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] Acquiring lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.078580] env[61629]: DEBUG oslo_concurrency.lockutils [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] Lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.078580] env[61629]: DEBUG oslo_concurrency.lockutils [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] Lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.078906] env[61629]: DEBUG nova.compute.manager [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] No waiting events found dispatching network-vif-plugged-b8a895f7-ad9d-4d49-8460-de82459d88f7 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 841.078906] env[61629]: WARNING nova.compute.manager [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Received unexpected event network-vif-plugged-b8a895f7-ad9d-4d49-8460-de82459d88f7 for instance with vm_state building and task_state spawning. [ 841.079029] env[61629]: DEBUG nova.compute.manager [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Received event network-changed-b8a895f7-ad9d-4d49-8460-de82459d88f7 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 841.079220] env[61629]: DEBUG nova.compute.manager [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Refreshing instance network info cache due to event network-changed-b8a895f7-ad9d-4d49-8460-de82459d88f7. 
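
The repeated "Acquiring lock" / "acquired ... waited" / "released ... held" lines above come from oslo_concurrency's lockutils helpers wrapped around critical sections such as the resource tracker's "compute_resources" work and the "refresh_cache-<uuid>" cache refreshes. A minimal sketch of both usage forms, with illustrative function bodies only:

from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def claim_resources(instance):
    # Runs only while the named internal semaphore is held; the waited/held
    # durations in the DEBUG lines above are measured around this body.
    ...


def refresh_network_cache(instance_uuid):
    # Same helper used as a context manager, as for the
    # "refresh_cache-<uuid>" locks above.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        ...
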
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 841.080177] env[61629]: DEBUG oslo_concurrency.lockutils [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] Acquiring lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.080177] env[61629]: DEBUG oslo_concurrency.lockutils [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] Acquired lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.080177] env[61629]: DEBUG nova.network.neutron [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Refreshing network info cache for port b8a895f7-ad9d-4d49-8460-de82459d88f7 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 841.099691] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1a57bd50-2c3b-4322-9943-cfdb40d76e1d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "76f08ac6-bb83-4d61-9707-b602028c54f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 117.484s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.100707] env[61629]: DEBUG nova.network.neutron [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Successfully created port: a7d2b513-9c8b-4100-babd-1362e7f4f251 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 841.169217] env[61629]: DEBUG nova.scheduler.client.report [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 841.433428] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52db5e36-c0e3-7560-574a-0d1128663e7e, 'name': SearchDatastore_Task, 'duration_secs': 0.010686} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.433534] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.433784] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 841.434000] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.448881] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52b945b0-f554-f55f-7aa1-f63015c095b4, 'name': SearchDatastore_Task, 'duration_secs': 0.011264} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.449722] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d220cb36-81a7-4eb8-946d-93f26744becf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.457022] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for the task: (returnval){ [ 841.457022] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52505d12-ffa5-4d28-7552-4bc5fc9a9584" [ 841.457022] env[61629]: _type = "Task" [ 841.457022] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.464085] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52505d12-ffa5-4d28-7552-4bc5fc9a9584, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.607106] env[61629]: DEBUG nova.compute.manager [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Starting instance... 
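
The "Processing image 7f036972-..." entries above, together with the SearchDatastore_Task lookups against the devstack-image-cache_base directory, reflect a check-then-fetch image cache: reuse the cached VMDK if present, otherwise transfer it. A minimal sketch under those assumptions; search_datastore and fetch_image are hypothetical stand-ins, and the real flow also serializes access with the image-cache locks shown above:

def fetch_image_if_missing(search_datastore, fetch_image, image_id,
                           cache_dir="[datastore2] devstack-image-cache_base"):
    """Return the cached VMDK path, fetching the image only when absent."""
    vmdk_path = "%s/%s/%s.vmdk" % (cache_dir, image_id, image_id)
    if not search_datastore(vmdk_path):
        fetch_image(image_id, vmdk_path)
    return vmdk_path
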
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 841.675886] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.969s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.676562] env[61629]: ERROR nova.compute.manager [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port cb3c3178-0348-4ebc-9069-c2307b76759c, please check neutron logs for more information. [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Traceback (most recent call last): [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] self.driver.spawn(context, instance, image_meta, [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] vm_ref = self.build_virtual_machine(instance, [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] vif_infos = vmwarevif.get_vif_info(self._session, [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] for vif in network_info: [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] return self._sync_wrapper(fn, *args, **kwargs) [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] self.wait() [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 841.676562] env[61629]: ERROR 
nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] self[:] = self._gt.wait() [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] return self._exit_event.wait() [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] current.throw(*self._exc) [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] result = function(*args, **kwargs) [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] return func(*args, **kwargs) [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] raise e [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] nwinfo = self.network_api.allocate_for_instance( [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] created_port_ids = self._update_ports_for_instance( [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] with excutils.save_and_reraise_exception(): [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] self.force_reraise() [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 841.676562] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] raise self.value [ 841.677598] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in 
_update_ports_for_instance [ 841.677598] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] updated_port = self._update_port( [ 841.677598] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 841.677598] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] _ensure_no_port_binding_failure(port) [ 841.677598] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 841.677598] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] raise exception.PortBindingFailed(port_id=port['id']) [ 841.677598] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] nova.exception.PortBindingFailed: Binding failed for port cb3c3178-0348-4ebc-9069-c2307b76759c, please check neutron logs for more information. [ 841.677598] env[61629]: ERROR nova.compute.manager [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] [ 841.677598] env[61629]: DEBUG nova.compute.utils [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Binding failed for port cb3c3178-0348-4ebc-9069-c2307b76759c, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 841.678761] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.246s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.682014] env[61629]: DEBUG nova.compute.manager [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Build of instance 3cabd3ef-590a-41f3-a611-3d27b4853db5 was re-scheduled: Binding failed for port cb3c3178-0348-4ebc-9069-c2307b76759c, please check neutron logs for more information. 
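
The traceback above ends in _ensure_no_port_binding_failure raising PortBindingFailed after Neutron reported a failed binding for the port. A minimal standalone sketch of that check, simplified from the behaviour the traceback shows (names and constants abbreviated here):

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__("Binding failed for port %s, please check neutron "
                         "logs for more information." % port_id)


def ensure_no_port_binding_failure(port):
    # Neutron reports a failed binding via binding:vif_type; Nova turns that
    # into the exception that triggered the re-schedule above.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port_id=port["id"])
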
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 841.682505] env[61629]: DEBUG nova.compute.manager [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 841.682773] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquiring lock "refresh_cache-3cabd3ef-590a-41f3-a611-3d27b4853db5" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.682960] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquired lock "refresh_cache-3cabd3ef-590a-41f3-a611-3d27b4853db5" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.683171] env[61629]: DEBUG nova.network.neutron [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 841.734573] env[61629]: DEBUG nova.compute.manager [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 841.766583] env[61629]: DEBUG nova.virt.hardware [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 841.766821] env[61629]: DEBUG nova.virt.hardware [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 841.767171] env[61629]: DEBUG nova.virt.hardware [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 841.767171] env[61629]: DEBUG nova.virt.hardware [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 841.767735] env[61629]: DEBUG nova.virt.hardware [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 841.767735] env[61629]: DEBUG nova.virt.hardware [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 841.767735] env[61629]: DEBUG nova.virt.hardware [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 841.768014] env[61629]: DEBUG nova.virt.hardware [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 841.768014] env[61629]: DEBUG nova.virt.hardware [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 841.768667] env[61629]: DEBUG nova.virt.hardware [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 841.768916] env[61629]: DEBUG nova.virt.hardware [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 841.769844] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7412a50-6cd6-4c02-94bb-79dfd2dca143 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.778142] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331ad2bb-dfac-4c2c-9a1d-dced8b237d6d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.854276] env[61629]: DEBUG nova.network.neutron [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Updated VIF entry in instance network info cache for port b8a895f7-ad9d-4d49-8460-de82459d88f7. 
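The nova.virt.hardware lines above show the driver enumerating CPU topologies for the 1-vCPU m1.nano flavor and ending up with the single candidate sockets=1, cores=1, threads=1. A minimal sketch of that enumeration step follows; the function name and the simple divisor walk are assumptions for illustration, not Nova's actual implementation.

    from collections import namedtuple

    # Sketch: enumerate (sockets, cores, threads) combinations whose product
    # equals the flavor's vCPU count, capped by the maximums shown in the log
    # (65536 each when neither flavor nor image constrains the topology).
    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    topologies.append(VirtCPUTopology(sockets, cores, threads))
        return topologies

    # For the 1-vCPU flavor above this yields exactly one candidate, matching
    # "Got 1 possible topologies" and "VirtCPUTopology(cores=1,sockets=1,threads=1)".
    print(possible_cpu_topologies(1))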
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 841.854910] env[61629]: DEBUG nova.network.neutron [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Updating instance_info_cache with network_info: [{"id": "b8a895f7-ad9d-4d49-8460-de82459d88f7", "address": "fa:16:3e:7d:7e:9a", "network": {"id": "7ab21805-1836-4ac0-94d2-d715f9f3352e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1256584900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc5fe81fb0eb4820825cc8e97b8fe4f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8a895f7-ad", "ovs_interfaceid": "b8a895f7-ad9d-4d49-8460-de82459d88f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.966765] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52505d12-ffa5-4d28-7552-4bc5fc9a9584, 'name': SearchDatastore_Task, 'duration_secs': 0.04219} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.967034] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.967309] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] dce0c7e1-1e47-49ad-88f7-f8f5e293d239/dce0c7e1-1e47-49ad-88f7-f8f5e293d239.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 841.967625] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.967814] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 841.968043] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d06d4f7d-b4d0-43c3-817d-3d75936031f8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.970051] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1518d92-29d2-4409-8f90-b3c84f7fec2c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.977293] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for the task: (returnval){ [ 841.977293] env[61629]: value = "task-1354075" [ 841.977293] env[61629]: _type = "Task" [ 841.977293] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.982065] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 841.982272] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 841.983497] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92f5511f-f3cb-4088-88f8-1c2d03561214 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.989178] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354075, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.992208] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 841.992208] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52c44839-8627-6689-c589-ae054c9d8236" [ 841.992208] env[61629]: _type = "Task" [ 841.992208] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.001097] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52c44839-8627-6689-c589-ae054c9d8236, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.135601] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.222623] env[61629]: DEBUG nova.network.neutron [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 842.353014] env[61629]: DEBUG nova.network.neutron [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.357398] env[61629]: DEBUG oslo_concurrency.lockutils [req-7b54c1cf-957d-4b0b-96bf-f6e21e321629 req-244d3930-81a9-476c-bd5b-27594caae3d8 service nova] Releasing lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.502619] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Acquiring lock "109ab664-3bb9-420e-a4a5-526277c60b96" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.502903] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Lock "109ab664-3bb9-420e-a4a5-526277c60b96" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.503217] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354075, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504154} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.509649] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] dce0c7e1-1e47-49ad-88f7-f8f5e293d239/dce0c7e1-1e47-49ad-88f7-f8f5e293d239.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 842.509879] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 842.510730] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8980370c-ca6d-44e6-af63-058f71d1b129 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.518519] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52c44839-8627-6689-c589-ae054c9d8236, 'name': SearchDatastore_Task, 'duration_secs': 0.008989} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.520623] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for the task: (returnval){ [ 842.520623] env[61629]: value = "task-1354076" [ 842.520623] env[61629]: _type = "Task" [ 842.520623] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.520623] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05b99e3e-f52e-4232-8aa1-00d6d9f0cd1c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.532524] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 842.532524] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5206e951-f17a-2562-2885-12f77e8b9f71" [ 842.532524] env[61629]: _type = "Task" [ 842.532524] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.535636] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354076, 'name': ExtendVirtualDisk_Task} progress is 0%. 
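The repeated "Task: {...} progress is N%" and "completed successfully" entries come from a poll loop that re-reads task state until it reaches a terminal state. Below is a hedged, generic sketch of such a loop; the fetch_task_info callable and its .state/.progress/.error attributes are assumed shapes, not the real oslo.vmware interface.

    import time

    # Sketch of a wait loop like the one behind the "_poll_task ... progress is N%"
    # and "completed successfully" entries. `fetch_task_info` is an assumed callable
    # returning an object with .state ('running' | 'success' | 'error'), .progress
    # and .error; it is not the real oslo.vmware interface.
    def wait_for_task(fetch_task_info, interval=0.5, timeout=300.0):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info()
            if info.state == "success":
                return info                         # task completed successfully
            if info.state == "error":
                raise RuntimeError(f"task failed: {info.error}")
            print(f"progress is {info.progress}%")  # still running, poll again
            time.sleep(interval)
        raise TimeoutError("task did not complete within %.0fs" % timeout)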
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.542997] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5206e951-f17a-2562-2885-12f77e8b9f71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.553174] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82276bb0-a226-4a9c-ab62-aac7d92c9fa1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.561567] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae4e5b8-a65c-444e-9768-ae5d7499c81a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.596029] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ccc807d-a6a9-44e7-bb26-a036cec3515b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.603523] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85d5149-e8e6-4a23-9663-eb649419a5ee {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.616958] env[61629]: DEBUG nova.compute.provider_tree [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 842.850079] env[61629]: DEBUG nova.compute.manager [req-84c28ea4-36f0-4c16-a1d1-2edbe48ae754 req-084bd7f0-0324-48b4-bd29-2ab1a24246b1 service nova] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Received event network-vif-plugged-a7d2b513-9c8b-4100-babd-1362e7f4f251 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 842.850327] env[61629]: DEBUG oslo_concurrency.lockutils [req-84c28ea4-36f0-4c16-a1d1-2edbe48ae754 req-084bd7f0-0324-48b4-bd29-2ab1a24246b1 service nova] Acquiring lock "edb4e0f6-57ad-48cf-aa20-3b2549bff3fe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.850542] env[61629]: DEBUG oslo_concurrency.lockutils [req-84c28ea4-36f0-4c16-a1d1-2edbe48ae754 req-084bd7f0-0324-48b4-bd29-2ab1a24246b1 service nova] Lock "edb4e0f6-57ad-48cf-aa20-3b2549bff3fe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.851314] env[61629]: DEBUG oslo_concurrency.lockutils [req-84c28ea4-36f0-4c16-a1d1-2edbe48ae754 req-084bd7f0-0324-48b4-bd29-2ab1a24246b1 service nova] Lock "edb4e0f6-57ad-48cf-aa20-3b2549bff3fe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.851729] env[61629]: DEBUG nova.compute.manager [req-84c28ea4-36f0-4c16-a1d1-2edbe48ae754 req-084bd7f0-0324-48b4-bd29-2ab1a24246b1 service nova] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] No waiting events found dispatching network-vif-plugged-a7d2b513-9c8b-4100-babd-1362e7f4f251 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 842.852066] env[61629]: WARNING nova.compute.manager [req-84c28ea4-36f0-4c16-a1d1-2edbe48ae754 req-084bd7f0-0324-48b4-bd29-2ab1a24246b1 service nova] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Received unexpected event network-vif-plugged-a7d2b513-9c8b-4100-babd-1362e7f4f251 for instance with vm_state building and task_state spawning. [ 842.855559] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Releasing lock "refresh_cache-3cabd3ef-590a-41f3-a611-3d27b4853db5" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.855791] env[61629]: DEBUG nova.compute.manager [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 842.855922] env[61629]: DEBUG nova.compute.manager [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 842.856598] env[61629]: DEBUG nova.network.neutron [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 842.874945] env[61629]: DEBUG nova.network.neutron [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 842.887537] env[61629]: DEBUG nova.network.neutron [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Successfully updated port: a7d2b513-9c8b-4100-babd-1362e7f4f251 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 843.033470] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354076, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063297} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.033764] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 843.034624] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04aae8f3-0d52-4248-993a-d9c339fd78ad {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.059498] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5206e951-f17a-2562-2885-12f77e8b9f71, 'name': SearchDatastore_Task, 'duration_secs': 0.010929} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.068217] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] dce0c7e1-1e47-49ad-88f7-f8f5e293d239/dce0c7e1-1e47-49ad-88f7-f8f5e293d239.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 843.069640] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.069893] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4/fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 843.070131] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5747a281-bb8c-49fe-bed8-693cf50c008a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.087865] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d44bb53d-7810-430c-9f26-406fafb60d08 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.090929] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "a42d5132-22e5-4551-80d2-fb7a55a7fa9e" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.091053] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "a42d5132-22e5-4551-80d2-fb7a55a7fa9e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.096510] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 843.096510] env[61629]: value = "task-1354077" [ 843.096510] env[61629]: _type = "Task" [ 843.096510] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.098039] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for the task: (returnval){ [ 843.098039] env[61629]: value = "task-1354078" [ 843.098039] env[61629]: _type = "Task" [ 843.098039] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.109840] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354077, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.112847] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354078, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.120936] env[61629]: DEBUG nova.scheduler.client.report [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 843.378355] env[61629]: DEBUG nova.network.neutron [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.393506] env[61629]: DEBUG oslo_concurrency.lockutils [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Acquiring lock "refresh_cache-edb4e0f6-57ad-48cf-aa20-3b2549bff3fe" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.393506] env[61629]: DEBUG oslo_concurrency.lockutils [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Acquired lock "refresh_cache-edb4e0f6-57ad-48cf-aa20-3b2549bff3fe" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.393506] env[61629]: DEBUG nova.network.neutron [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 843.609321] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354077, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486327} completed successfully. 
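The scheduler report above echoes the provider inventory per resource class (total, reserved, allocation_ratio, max_unit). As a worked example under the usual Placement rule that usable capacity is (total - reserved) * allocation_ratio:

    # Worked example using the inventory echoed above; capacity follows the
    # standard Placement rule (total - reserved) * allocation_ratio, while
    # max_unit bounds what one allocation may request.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 151},
    }
    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: capacity={capacity:.0f}, per-allocation max_unit={inv['max_unit']}")
    # VCPU: capacity=192, MEMORY_MB: capacity=196078, DISK_GB: capacity=400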
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.609968] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4/fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 843.610234] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 843.610491] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c6f350bb-24f4-4ea9-b1c1-3cc53caf3e11 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.616122] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354078, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.620987] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 843.620987] env[61629]: value = "task-1354079" [ 843.620987] env[61629]: _type = "Task" [ 843.620987] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.625832] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.947s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.626451] env[61629]: ERROR nova.compute.manager [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port cd21a5cd-a27c-454a-ac3b-e137434d322b, please check neutron logs for more information. 
[ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Traceback (most recent call last): [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] self.driver.spawn(context, instance, image_meta, [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] vm_ref = self.build_virtual_machine(instance, [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] vif_infos = vmwarevif.get_vif_info(self._session, [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] for vif in network_info: [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] return self._sync_wrapper(fn, *args, **kwargs) [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] self.wait() [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] self[:] = self._gt.wait() [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] return self._exit_event.wait() [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] current.throw(*self._exc) [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] result = function(*args, **kwargs) [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] return func(*args, **kwargs) [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] raise e [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] nwinfo = self.network_api.allocate_for_instance( [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] created_port_ids = self._update_ports_for_instance( [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] with excutils.save_and_reraise_exception(): [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] self.force_reraise() [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 843.626451] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] raise self.value [ 843.627853] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 843.627853] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] updated_port = self._update_port( [ 843.627853] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 843.627853] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] _ensure_no_port_binding_failure(port) [ 843.627853] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 843.627853] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] raise exception.PortBindingFailed(port_id=port['id']) [ 843.627853] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] nova.exception.PortBindingFailed: Binding failed for 
port cd21a5cd-a27c-454a-ac3b-e137434d322b, please check neutron logs for more information. [ 843.627853] env[61629]: ERROR nova.compute.manager [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] [ 843.627853] env[61629]: DEBUG nova.compute.utils [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Binding failed for port cd21a5cd-a27c-454a-ac3b-e137434d322b, please check neutron logs for more information. {{(pid=61629) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 843.631053] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.752s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.631178] env[61629]: DEBUG nova.objects.instance [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Lazy-loading 'resources' on Instance uuid b8cfaef2-5f78-4026-90b8-fe2adacd61e0 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 843.632211] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354079, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.632824] env[61629]: DEBUG nova.compute.manager [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Build of instance c1bb3820-0c77-4a7e-bcce-17d5e6793ab9 was re-scheduled: Binding failed for port cd21a5cd-a27c-454a-ac3b-e137434d322b, please check neutron logs for more information. 
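The traceback above ends in _ensure_no_port_binding_failure raising PortBindingFailed for the port Neutron could not bind. A simplified sketch of that guard follows; the 'binding_failed' sentinel for binding:vif_type is the common Neutron convention, and the code is an illustrative approximation rather than a copy of Nova's implementation.

    # Simplified guard mirroring the end of the traceback: inspect the port
    # returned by Neutron and raise if its binding failed.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        if port.get("binding:vif_type") == "binding_failed":
            raise PortBindingFailed(port_id=port["id"])

    # e.g. the failing port from this build attempt:
    # ensure_no_port_binding_failure(
    #     {"id": "cd21a5cd-a27c-454a-ac3b-e137434d322b",
    #      "binding:vif_type": "binding_failed"})   # -> raises PortBindingFailed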
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 843.633473] env[61629]: DEBUG nova.compute.manager [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Unplugging VIFs for instance {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 843.633568] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "refresh_cache-c1bb3820-0c77-4a7e-bcce-17d5e6793ab9" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.633649] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquired lock "refresh_cache-c1bb3820-0c77-4a7e-bcce-17d5e6793ab9" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.633804] env[61629]: DEBUG nova.network.neutron [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 843.881307] env[61629]: INFO nova.compute.manager [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 3cabd3ef-590a-41f3-a611-3d27b4853db5] Took 1.02 seconds to deallocate network for instance. [ 843.945849] env[61629]: DEBUG nova.network.neutron [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 844.108311] env[61629]: DEBUG nova.network.neutron [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Updating instance_info_cache with network_info: [{"id": "a7d2b513-9c8b-4100-babd-1362e7f4f251", "address": "fa:16:3e:b3:e4:9b", "network": {"id": "8356c8b9-0b28-42ff-afc3-75ddbbe07c2f", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-547328745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4c865d2dd4d4c9eba96f315e09a5e6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7d2b513-9c", "ovs_interfaceid": "a7d2b513-9c8b-4100-babd-1362e7f4f251", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.114792] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354078, 'name': ReconfigVM_Task, 'duration_secs': 0.769263} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.115081] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Reconfigured VM instance instance-0000003f to attach disk [datastore2] dce0c7e1-1e47-49ad-88f7-f8f5e293d239/dce0c7e1-1e47-49ad-88f7-f8f5e293d239.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 844.115767] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3dcacb9b-4b24-4d6f-ad5e-70cc8b704e28 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.123797] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for the task: (returnval){ [ 844.123797] env[61629]: value = "task-1354080" [ 844.123797] env[61629]: _type = "Task" [ 844.123797] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.136976] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354079, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071745} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.143322] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 844.143322] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354080, 'name': Rename_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.143322] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f530f408-eae2-4468-a466-785676e54276 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.169289] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4/fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 844.170130] env[61629]: DEBUG nova.network.neutron [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 844.174028] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8e748cb-95fd-42c5-a2b1-d30f59161492 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.196049] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 844.196049] env[61629]: value = "task-1354081" [ 844.196049] env[61629]: _type = "Task" [ 844.196049] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.205717] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354081, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.270646] env[61629]: DEBUG nova.network.neutron [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.430335] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009ac3c6-c50c-4346-b361-b9c458cbac37 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.438578] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a69c68f4-9e1c-402e-a47b-85a148723e0b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.469580] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a7bf65b-4068-4e67-95cc-540993dc3434 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.477040] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f68aa9f-70b3-485c-9f35-75052b73795c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.489845] env[61629]: DEBUG nova.compute.provider_tree [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 844.610265] env[61629]: DEBUG oslo_concurrency.lockutils [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Releasing lock "refresh_cache-edb4e0f6-57ad-48cf-aa20-3b2549bff3fe" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.610600] env[61629]: DEBUG nova.compute.manager [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Instance network_info: |[{"id": "a7d2b513-9c8b-4100-babd-1362e7f4f251", "address": "fa:16:3e:b3:e4:9b", "network": {"id": "8356c8b9-0b28-42ff-afc3-75ddbbe07c2f", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-547328745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4c865d2dd4d4c9eba96f315e09a5e6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7d2b513-9c", "ovs_interfaceid": "a7d2b513-9c8b-4100-babd-1362e7f4f251", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 844.611066] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:e4:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee20e439-fed9-490e-97dd-f3c886977ae1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a7d2b513-9c8b-4100-babd-1362e7f4f251', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 844.620015] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Creating folder: Project (f4c865d2dd4d4c9eba96f315e09a5e6b). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 844.620015] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-01c48bef-7307-4f3a-b784-ed19a4e27962 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.631667] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Created folder: Project (f4c865d2dd4d4c9eba96f315e09a5e6b) in parent group-v288443. [ 844.631850] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Creating folder: Instances. Parent ref: group-v288478. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 844.632441] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e1aff45f-d4e0-466f-9056-9a9a02af1cd0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.637031] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354080, 'name': Rename_Task, 'duration_secs': 0.145955} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.637601] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 844.637828] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aa5be980-4d9a-49c0-8acd-f882a79b9a6a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.643585] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for the task: (returnval){ [ 844.643585] env[61629]: value = "task-1354084" [ 844.643585] env[61629]: _type = "Task" [ 844.643585] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.647463] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Created folder: Instances in parent group-v288478. [ 844.647688] env[61629]: DEBUG oslo.service.loopingcall [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 844.648222] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 844.648442] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5dc097b2-27d1-400b-b64a-5c7f41d87a8f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.665583] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354084, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.669399] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 844.669399] env[61629]: value = "task-1354085" [ 844.669399] env[61629]: _type = "Task" [ 844.669399] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.677652] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354085, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.705407] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354081, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.773830] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Releasing lock "refresh_cache-c1bb3820-0c77-4a7e-bcce-17d5e6793ab9" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.774106] env[61629]: DEBUG nova.compute.manager [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61629) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 844.774319] env[61629]: DEBUG nova.compute.manager [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 844.774519] env[61629]: DEBUG nova.network.neutron [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 844.790012] env[61629]: DEBUG nova.network.neutron [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 844.874902] env[61629]: DEBUG nova.compute.manager [req-e2834bfe-9ea3-492f-be52-05fb53d72840 req-8adaf834-faef-40ff-baa7-e2b41d04d57d service nova] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Received event network-changed-a7d2b513-9c8b-4100-babd-1362e7f4f251 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 844.875290] env[61629]: DEBUG nova.compute.manager [req-e2834bfe-9ea3-492f-be52-05fb53d72840 req-8adaf834-faef-40ff-baa7-e2b41d04d57d service nova] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Refreshing instance network info cache due to event network-changed-a7d2b513-9c8b-4100-babd-1362e7f4f251. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 844.875532] env[61629]: DEBUG oslo_concurrency.lockutils [req-e2834bfe-9ea3-492f-be52-05fb53d72840 req-8adaf834-faef-40ff-baa7-e2b41d04d57d service nova] Acquiring lock "refresh_cache-edb4e0f6-57ad-48cf-aa20-3b2549bff3fe" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.875677] env[61629]: DEBUG oslo_concurrency.lockutils [req-e2834bfe-9ea3-492f-be52-05fb53d72840 req-8adaf834-faef-40ff-baa7-e2b41d04d57d service nova] Acquired lock "refresh_cache-edb4e0f6-57ad-48cf-aa20-3b2549bff3fe" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.875838] env[61629]: DEBUG nova.network.neutron [req-e2834bfe-9ea3-492f-be52-05fb53d72840 req-8adaf834-faef-40ff-baa7-e2b41d04d57d service nova] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Refreshing network info cache for port a7d2b513-9c8b-4100-babd-1362e7f4f251 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 844.915639] env[61629]: INFO nova.scheduler.client.report [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Deleted allocations for instance 3cabd3ef-590a-41f3-a611-3d27b4853db5 [ 844.995067] env[61629]: DEBUG nova.scheduler.client.report [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 845.154571] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354084, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.178488] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354085, 'name': CreateVM_Task, 'duration_secs': 0.3456} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.178662] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 845.179363] env[61629]: DEBUG oslo_concurrency.lockutils [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.179523] env[61629]: DEBUG oslo_concurrency.lockutils [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.179846] env[61629]: DEBUG oslo_concurrency.lockutils [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 845.180115] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-108753b9-76bb-46c4-990d-4588c9617ddc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.185292] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Waiting for the task: (returnval){ [ 845.185292] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52e57550-0c9b-b671-2cb7-af70d57ee438" [ 845.185292] env[61629]: _type = "Task" [ 845.185292] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.194039] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52e57550-0c9b-b671-2cb7-af70d57ee438, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.206134] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354081, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.292463] env[61629]: DEBUG nova.network.neutron [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.428081] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6c62aa8e-95fa-4992-90c8-1d2775232a21 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "3cabd3ef-590a-41f3-a611-3d27b4853db5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.268s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.499494] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.868s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.504177] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.790s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.505945] env[61629]: INFO nova.compute.claims [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 845.519732] env[61629]: INFO nova.scheduler.client.report [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Deleted allocations for instance b8cfaef2-5f78-4026-90b8-fe2adacd61e0 [ 845.659834] env[61629]: DEBUG oslo_vmware.api [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354084, 'name': PowerOnVM_Task, 'duration_secs': 0.557109} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.659834] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 845.659834] env[61629]: INFO nova.compute.manager [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Took 14.37 seconds to spawn the instance on the hypervisor. 
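The records above show the standard oslo.vmware task pattern that recurs throughout this log: a SOAP method such as Folder.CreateVM_Task or VirtualMachine.PowerOnVM_Task is invoked, a task reference comes back, and wait_for_task polls it (the repeated "_poll_task ... progress is N%" records) until it completes. Below is a minimal, illustrative sketch of that pattern only; it is not the Nova code that produced these records. It assumes an already-established oslo_vmware.api.VMwareAPISession as "session" and a hypothetical "vm_ref" managed-object reference obtained elsewhere.

def power_on_vm(session, vm_ref):
    # invoke_api() issues the SOAP call (the "Invoking
    # VirtualMachine.PowerOnVM_Task" records above) and returns a task ref.
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task (the "_poll_task ... progress is N%"
    # records) and returns the task info on success; a task that ends in
    # the error state is raised as an oslo.vmware exception instead.
    return session.wait_for_task(task_ref)

The same two-step shape (invoke, then wait) accounts for the Rename_Task, CreateVM_Task and ReconfigVM_Task records interleaved above.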
[ 845.659834] env[61629]: DEBUG nova.compute.manager [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 845.659834] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9231271b-10c5-42dd-b460-62f752234ea6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.686338] env[61629]: DEBUG nova.network.neutron [req-e2834bfe-9ea3-492f-be52-05fb53d72840 req-8adaf834-faef-40ff-baa7-e2b41d04d57d service nova] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Updated VIF entry in instance network info cache for port a7d2b513-9c8b-4100-babd-1362e7f4f251. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 845.686338] env[61629]: DEBUG nova.network.neutron [req-e2834bfe-9ea3-492f-be52-05fb53d72840 req-8adaf834-faef-40ff-baa7-e2b41d04d57d service nova] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Updating instance_info_cache with network_info: [{"id": "a7d2b513-9c8b-4100-babd-1362e7f4f251", "address": "fa:16:3e:b3:e4:9b", "network": {"id": "8356c8b9-0b28-42ff-afc3-75ddbbe07c2f", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-547328745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f4c865d2dd4d4c9eba96f315e09a5e6b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7d2b513-9c", "ovs_interfaceid": "a7d2b513-9c8b-4100-babd-1362e7f4f251", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.702241] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52e57550-0c9b-b671-2cb7-af70d57ee438, 'name': SearchDatastore_Task, 'duration_secs': 0.009717} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.706186] env[61629]: DEBUG oslo_concurrency.lockutils [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.706186] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 845.706186] env[61629]: DEBUG oslo_concurrency.lockutils [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.706186] env[61629]: DEBUG oslo_concurrency.lockutils [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.707496] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 845.708061] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b605344a-dce3-4583-bf32-1d7d06e9f303 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.716028] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354081, 'name': ReconfigVM_Task, 'duration_secs': 1.316827} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.716404] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Reconfigured VM instance instance-00000040 to attach disk [datastore2] fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4/fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 845.717948] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-265a2757-5acf-4f3e-baf0-c2fb90084aee {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.719963] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 845.720159] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 845.720967] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf1719d2-0a33-4a11-a869-2fbc8f61458e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.727169] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Waiting for the task: (returnval){ [ 845.727169] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52d12f64-70ea-72be-8ecb-a441465a47cd" [ 845.727169] env[61629]: _type = "Task" [ 845.727169] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.731353] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 845.731353] env[61629]: value = "task-1354086" [ 845.731353] env[61629]: _type = "Task" [ 845.731353] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.738547] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52d12f64-70ea-72be-8ecb-a441465a47cd, 'name': SearchDatastore_Task, 'duration_secs': 0.009849} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.742374] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354086, 'name': Rename_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.742374] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70d882d7-9d9e-4fc8-829e-5118c524b6ab {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.747294] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Waiting for the task: (returnval){ [ 845.747294] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52d31bb0-8f5d-7abd-1740-1fac078f9eb5" [ 845.747294] env[61629]: _type = "Task" [ 845.747294] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.755221] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52d31bb0-8f5d-7abd-1740-1fac078f9eb5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.795736] env[61629]: INFO nova.compute.manager [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: c1bb3820-0c77-4a7e-bcce-17d5e6793ab9] Took 1.02 seconds to deallocate network for instance. [ 845.931044] env[61629]: DEBUG nova.compute.manager [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 846.027563] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c9b11d9-bab7-4f0a-be62-44463f844118 tempest-ServersAaction247Test-457592037 tempest-ServersAaction247Test-457592037-project-member] Lock "b8cfaef2-5f78-4026-90b8-fe2adacd61e0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.892s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.186850] env[61629]: INFO nova.compute.manager [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Took 34.49 seconds to build instance. 
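The interleaved 'Acquiring lock ... by ...', 'acquired ... waited N.NNNs' and '"released" ... held N.NNNs' records (compute_resources, refresh_cache-<uuid>, the per-instance build locks) are the debug logging that oslo.concurrency emits around its synchronized decorator and lock() context manager, as the lockutils.py:402/407/421 trailers indicate. A small sketch of that usage follows, with hypothetical lock names, purely for orientation.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # The decorator logs "acquired ... waited N.NNNs" before this body runs
    # and '"released" ... held N.NNNs' after it returns, which is where the
    # waited/held durations in the records above come from.
    pass

# The same primitive is also used directly as a context manager, e.g. around
# the per-instance network info cache (the name shown is a placeholder):
with lockutils.lock('refresh_cache-<instance-uuid>'):
    pass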
[ 846.193811] env[61629]: DEBUG oslo_concurrency.lockutils [req-e2834bfe-9ea3-492f-be52-05fb53d72840 req-8adaf834-faef-40ff-baa7-e2b41d04d57d service nova] Releasing lock "refresh_cache-edb4e0f6-57ad-48cf-aa20-3b2549bff3fe" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.241779] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354086, 'name': Rename_Task, 'duration_secs': 0.165256} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.242044] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 846.242366] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-64cfbf6b-7a68-4de0-86ab-56a0dbc9276c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.249075] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 846.249075] env[61629]: value = "task-1354087" [ 846.249075] env[61629]: _type = "Task" [ 846.249075] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.260729] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52d31bb0-8f5d-7abd-1740-1fac078f9eb5, 'name': SearchDatastore_Task, 'duration_secs': 0.009879} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.263355] env[61629]: DEBUG oslo_concurrency.lockutils [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.263620] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] edb4e0f6-57ad-48cf-aa20-3b2549bff3fe/edb4e0f6-57ad-48cf-aa20-3b2549bff3fe.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 846.263887] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354087, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.264143] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-05234301-b709-4d6b-9a93-3168ad380c40 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.270956] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Waiting for the task: (returnval){ [ 846.270956] env[61629]: value = "task-1354088" [ 846.270956] env[61629]: _type = "Task" [ 846.270956] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.282400] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Task: {'id': task-1354088, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.460886] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.690503] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ab4282dd-59c8-4b8f-9b61-ae833b7db5c4 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Lock "dce0c7e1-1e47-49ad-88f7-f8f5e293d239" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.377s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.762529] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354087, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.783786] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Task: {'id': task-1354088, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50321} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.783912] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] edb4e0f6-57ad-48cf-aa20-3b2549bff3fe/edb4e0f6-57ad-48cf-aa20-3b2549bff3fe.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 846.784156] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 846.784421] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-af9e85cb-15dc-4c04-8c5d-ee86481b14dd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.790513] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Waiting for the task: (returnval){ [ 846.790513] env[61629]: value = "task-1354089" [ 846.790513] env[61629]: _type = "Task" [ 846.790513] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.801863] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Task: {'id': task-1354089, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.809678] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c08f6f89-6b54-458c-bee7-f612ebd33f56 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.816491] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a2c616-e7ac-4fec-a07b-4119afee0384 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.849943] env[61629]: INFO nova.scheduler.client.report [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Deleted allocations for instance c1bb3820-0c77-4a7e-bcce-17d5e6793ab9 [ 846.857087] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2823129-924f-4497-b174-b44c7ff59997 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.866857] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35832371-8d1e-47bd-a07c-da84481e0d52 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.882249] env[61629]: DEBUG oslo_concurrency.lockutils [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquiring lock "dce0c7e1-1e47-49ad-88f7-f8f5e293d239" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.882450] env[61629]: DEBUG oslo_concurrency.lockutils [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Lock "dce0c7e1-1e47-49ad-88f7-f8f5e293d239" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.882654] env[61629]: DEBUG oslo_concurrency.lockutils [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquiring lock "dce0c7e1-1e47-49ad-88f7-f8f5e293d239-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.882832] env[61629]: DEBUG oslo_concurrency.lockutils [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Lock "dce0c7e1-1e47-49ad-88f7-f8f5e293d239-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 
0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.883445] env[61629]: DEBUG oslo_concurrency.lockutils [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Lock "dce0c7e1-1e47-49ad-88f7-f8f5e293d239-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.884958] env[61629]: DEBUG nova.compute.provider_tree [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.888712] env[61629]: INFO nova.compute.manager [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Terminating instance [ 846.888815] env[61629]: DEBUG nova.compute.manager [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 846.888972] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 846.889781] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc7e56f-f4ed-4320-8eed-61b528010852 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.897608] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 846.897843] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-848d7c4e-a641-4ae5-b6c5-f8c4bcc6729e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.903648] env[61629]: DEBUG oslo_vmware.api [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for the task: (returnval){ [ 846.903648] env[61629]: value = "task-1354090" [ 846.903648] env[61629]: _type = "Task" [ 846.903648] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.911794] env[61629]: DEBUG oslo_vmware.api [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354090, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.193617] env[61629]: DEBUG nova.compute.manager [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 847.264023] env[61629]: DEBUG oslo_vmware.api [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354087, 'name': PowerOnVM_Task, 'duration_secs': 0.644417} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.264023] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 847.264023] env[61629]: INFO nova.compute.manager [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Took 9.76 seconds to spawn the instance on the hypervisor. [ 847.264023] env[61629]: DEBUG nova.compute.manager [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 847.264831] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af21ee2-d580-44a4-9f64-ce3162fb94de {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.299918] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Task: {'id': task-1354089, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085722} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.300218] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 847.301142] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3e8d2c8-53d5-441e-ac2e-524d9da98903 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.327303] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] edb4e0f6-57ad-48cf-aa20-3b2549bff3fe/edb4e0f6-57ad-48cf-aa20-3b2549bff3fe.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 847.328676] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8412d6e4-cb4b-4a1b-8844-2ad86bb5043d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.349536] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Waiting for the task: (returnval){ [ 847.349536] env[61629]: value = "task-1354091" [ 847.349536] env[61629]: _type = "Task" [ 847.349536] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.358745] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Task: {'id': task-1354091, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.366949] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e8131792-b475-49e6-af0c-27e8bb66cc47 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "c1bb3820-0c77-4a7e-bcce-17d5e6793ab9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.122s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.389864] env[61629]: DEBUG nova.scheduler.client.report [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 847.414245] env[61629]: DEBUG oslo_vmware.api [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354090, 'name': PowerOffVM_Task, 'duration_secs': 0.380398} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.414527] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 847.414692] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 847.414938] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b965b98-1afc-4f51-a405-0886360f95da {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.598365] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 847.598688] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 847.598969] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-224f8835-b020-4909-b2cd-66c794d887ac 
tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Deleting the datastore file [datastore2] dce0c7e1-1e47-49ad-88f7-f8f5e293d239 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 847.599375] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3e316b82-d70e-45ea-8cb6-6e092fc41089 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.607382] env[61629]: DEBUG oslo_vmware.api [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for the task: (returnval){ [ 847.607382] env[61629]: value = "task-1354093" [ 847.607382] env[61629]: _type = "Task" [ 847.607382] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.620035] env[61629]: DEBUG oslo_vmware.api [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354093, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.712705] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.782100] env[61629]: DEBUG oslo_concurrency.lockutils [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquiring lock "274e3437-eacd-4299-9c27-97bbb0ebf1c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.782100] env[61629]: DEBUG oslo_concurrency.lockutils [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "274e3437-eacd-4299-9c27-97bbb0ebf1c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.783495] env[61629]: INFO nova.compute.manager [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Took 29.45 seconds to build instance. [ 847.861191] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Task: {'id': task-1354091, 'name': ReconfigVM_Task, 'duration_secs': 0.285437} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.861518] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Reconfigured VM instance instance-00000041 to attach disk [datastore2] edb4e0f6-57ad-48cf-aa20-3b2549bff3fe/edb4e0f6-57ad-48cf-aa20-3b2549bff3fe.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 847.862329] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17096a14-62c5-43be-be30-7a57a3513177 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.869137] env[61629]: DEBUG nova.compute.manager [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 847.871801] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Waiting for the task: (returnval){ [ 847.871801] env[61629]: value = "task-1354094" [ 847.871801] env[61629]: _type = "Task" [ 847.871801] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.880755] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Task: {'id': task-1354094, 'name': Rename_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.894796] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.391s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.895530] env[61629]: DEBUG nova.compute.manager [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 847.898512] env[61629]: DEBUG oslo_concurrency.lockutils [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.204s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.900839] env[61629]: INFO nova.compute.claims [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 848.117364] env[61629]: DEBUG oslo_vmware.api [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354093, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.234895} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.117561] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 848.117752] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 848.117925] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 848.118117] env[61629]: INFO nova.compute.manager [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Took 1.23 seconds to destroy the instance on the hypervisor. [ 848.118401] env[61629]: DEBUG oslo.service.loopingcall [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 848.118605] env[61629]: DEBUG nova.compute.manager [-] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 848.118712] env[61629]: DEBUG nova.network.neutron [-] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 848.285571] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9ec4f8a2-72ef-482b-92d9-0b8ade3298e5 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.358s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.387210] env[61629]: DEBUG oslo_concurrency.lockutils [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "7cf87381-235e-449b-8269-61c2d4033028" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.387462] env[61629]: DEBUG oslo_concurrency.lockutils [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "7cf87381-235e-449b-8269-61c2d4033028" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.387660] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Task: {'id': task-1354094, 'name': Rename_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.398333] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.403808] env[61629]: DEBUG nova.compute.utils [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 848.407080] env[61629]: DEBUG nova.compute.manager [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 848.407188] env[61629]: DEBUG nova.network.neutron [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 848.458896] env[61629]: DEBUG nova.policy [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9e28dac2dd40476fbaecbe1e380815ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a5810b150c2146a58b8ceb44592a03e9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 848.724014] env[61629]: DEBUG nova.network.neutron [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Successfully created port: be65c78b-11aa-43c2-aad9-f7f19a2b47c1 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 848.790365] env[61629]: DEBUG nova.compute.manager [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Starting instance... 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 848.850884] env[61629]: DEBUG nova.compute.manager [req-876484bf-33eb-4492-b085-a26a821fb348 req-8a4a5e36-4e0c-4207-b7b2-b7334d77c55e service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Received event network-vif-deleted-67f6d4ea-55d3-4da2-bace-df8324128740 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 848.850884] env[61629]: INFO nova.compute.manager [req-876484bf-33eb-4492-b085-a26a821fb348 req-8a4a5e36-4e0c-4207-b7b2-b7334d77c55e service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Neutron deleted interface 67f6d4ea-55d3-4da2-bace-df8324128740; detaching it from the instance and deleting it from the info cache [ 848.851550] env[61629]: DEBUG nova.network.neutron [req-876484bf-33eb-4492-b085-a26a821fb348 req-8a4a5e36-4e0c-4207-b7b2-b7334d77c55e service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Updating instance_info_cache with network_info: [{"id": "ebc79844-43d9-4ea4-b9f1-aa5a92e010e5", "address": "fa:16:3e:0b:50:a0", "network": {"id": "cf4addbb-542d-4e7b-ae32-7fcb17418905", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-635911799", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.60", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ef41f406d18447fbee4e7b7ae52a2d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebc79844-43", "ovs_interfaceid": "ebc79844-43d9-4ea4-b9f1-aa5a92e010e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bf10a052-1802-41aa-9428-2bd89d22456d", "address": "fa:16:3e:a4:d2:77", "network": {"id": "cf4addbb-542d-4e7b-ae32-7fcb17418905", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-635911799", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.190", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ef41f406d18447fbee4e7b7ae52a2d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf10a052-18", "ovs_interfaceid": "bf10a052-1802-41aa-9428-2bd89d22456d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.887710] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f 
tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Task: {'id': task-1354094, 'name': Rename_Task, 'duration_secs': 0.845946} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.888256] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 848.888517] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8ff78a56-3ed8-427c-a418-b303356b397e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.895148] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Waiting for the task: (returnval){ [ 848.895148] env[61629]: value = "task-1354095" [ 848.895148] env[61629]: _type = "Task" [ 848.895148] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.903315] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Task: {'id': task-1354095, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.910023] env[61629]: DEBUG nova.compute.manager [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 849.224358] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f80ef6f-c790-4706-a940-94476275b952 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.232170] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70797048-30fc-49f7-bf78-8d55a1475c75 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.261948] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa38f24-4dff-4441-8167-abe7e909635a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.270874] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0962e02b-74c5-409f-a6cb-11040d01a869 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.285274] env[61629]: DEBUG nova.compute.provider_tree [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 849.308776] env[61629]: DEBUG oslo_concurrency.lockutils [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.359737] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2476c5d7-ecb4-4f70-9ded-b6d0879539e8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.370204] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92867fa0-28f3-4382-a18b-276e99357b23 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.396605] env[61629]: DEBUG nova.compute.manager [req-876484bf-33eb-4492-b085-a26a821fb348 req-8a4a5e36-4e0c-4207-b7b2-b7334d77c55e service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Detach interface failed, port_id=67f6d4ea-55d3-4da2-bace-df8324128740, reason: Instance dce0c7e1-1e47-49ad-88f7-f8f5e293d239 could not be found. {{(pid=61629) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 849.404083] env[61629]: DEBUG oslo_vmware.api [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Task: {'id': task-1354095, 'name': PowerOnVM_Task, 'duration_secs': 0.4719} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.404381] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 849.404896] env[61629]: INFO nova.compute.manager [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Took 7.67 seconds to spawn the instance on the hypervisor. [ 849.404896] env[61629]: DEBUG nova.compute.manager [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 849.405691] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2760537a-07e6-4f3c-bc5d-69c28bcb353d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.788163] env[61629]: DEBUG nova.scheduler.client.report [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 849.835915] env[61629]: DEBUG nova.network.neutron [-] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.922816] env[61629]: DEBUG nova.compute.manager [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 849.923973] env[61629]: INFO nova.compute.manager [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Took 25.04 seconds to build instance. 
[ 849.950450] env[61629]: DEBUG nova.virt.hardware [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 849.950450] env[61629]: DEBUG nova.virt.hardware [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 849.950628] env[61629]: DEBUG nova.virt.hardware [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 849.951356] env[61629]: DEBUG nova.virt.hardware [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 849.951565] env[61629]: DEBUG nova.virt.hardware [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 849.951694] env[61629]: DEBUG nova.virt.hardware [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 849.951915] env[61629]: DEBUG nova.virt.hardware [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 849.952094] env[61629]: DEBUG nova.virt.hardware [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 849.952269] env[61629]: DEBUG nova.virt.hardware [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f 
tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 849.952434] env[61629]: DEBUG nova.virt.hardware [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 849.952606] env[61629]: DEBUG nova.virt.hardware [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 849.953759] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8e52cd-7771-49f9-8cf7-563527a8a479 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.962420] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bffbec18-fc0f-41f2-b67b-dbe1d16f3d0e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.222383] env[61629]: DEBUG nova.compute.manager [req-c9bb9458-bb24-45c6-9df3-d67c0a1a5410 req-e1acfd94-13e4-4d41-9898-fa7bbc611419 service nova] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Received event network-vif-plugged-be65c78b-11aa-43c2-aad9-f7f19a2b47c1 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 850.222587] env[61629]: DEBUG oslo_concurrency.lockutils [req-c9bb9458-bb24-45c6-9df3-d67c0a1a5410 req-e1acfd94-13e4-4d41-9898-fa7bbc611419 service nova] Acquiring lock "c3f830d6-8999-49d5-a431-b09dfdaf8313-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.222799] env[61629]: DEBUG oslo_concurrency.lockutils [req-c9bb9458-bb24-45c6-9df3-d67c0a1a5410 req-e1acfd94-13e4-4d41-9898-fa7bbc611419 service nova] Lock "c3f830d6-8999-49d5-a431-b09dfdaf8313-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.222963] env[61629]: DEBUG oslo_concurrency.lockutils [req-c9bb9458-bb24-45c6-9df3-d67c0a1a5410 req-e1acfd94-13e4-4d41-9898-fa7bbc611419 service nova] Lock "c3f830d6-8999-49d5-a431-b09dfdaf8313-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.223332] env[61629]: DEBUG nova.compute.manager [req-c9bb9458-bb24-45c6-9df3-d67c0a1a5410 req-e1acfd94-13e4-4d41-9898-fa7bbc611419 service nova] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] No waiting events found dispatching network-vif-plugged-be65c78b-11aa-43c2-aad9-f7f19a2b47c1 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 850.223546] env[61629]: WARNING nova.compute.manager [req-c9bb9458-bb24-45c6-9df3-d67c0a1a5410 
req-e1acfd94-13e4-4d41-9898-fa7bbc611419 service nova] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Received unexpected event network-vif-plugged-be65c78b-11aa-43c2-aad9-f7f19a2b47c1 for instance with vm_state building and task_state spawning. [ 850.293031] env[61629]: DEBUG oslo_concurrency.lockutils [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.394s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.293572] env[61629]: DEBUG nova.compute.manager [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 850.296473] env[61629]: DEBUG oslo_concurrency.lockutils [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.406s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.297881] env[61629]: INFO nova.compute.claims [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 850.314400] env[61629]: DEBUG nova.network.neutron [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Successfully updated port: be65c78b-11aa-43c2-aad9-f7f19a2b47c1 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 850.338289] env[61629]: INFO nova.compute.manager [-] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Took 2.22 seconds to deallocate network for instance. [ 850.425762] env[61629]: DEBUG oslo_concurrency.lockutils [None req-19f20530-3065-4a67-8f70-53f19c2efe8f tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Lock "edb4e0f6-57ad-48cf-aa20-3b2549bff3fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 99.804s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.803348] env[61629]: DEBUG nova.compute.utils [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 850.807767] env[61629]: DEBUG nova.compute.manager [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 850.808434] env[61629]: DEBUG nova.network.neutron [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 850.814886] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Acquiring lock "refresh_cache-c3f830d6-8999-49d5-a431-b09dfdaf8313" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.815809] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Acquired lock "refresh_cache-c3f830d6-8999-49d5-a431-b09dfdaf8313" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.815809] env[61629]: DEBUG nova.network.neutron [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 850.845190] env[61629]: DEBUG oslo_concurrency.lockutils [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.847347] env[61629]: DEBUG nova.policy [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64ff84e38bb942bfa942de62353bd356', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b0b101e81dfe4c8b98314be278282c0d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 850.881222] env[61629]: DEBUG nova.compute.manager [req-5187a3ec-eb45-48bb-81b7-082aa6367e8c req-44f5a60a-d16c-47f1-ba65-b6b641fe9349 service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Received event network-changed-b8a895f7-ad9d-4d49-8460-de82459d88f7 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 850.881424] env[61629]: DEBUG nova.compute.manager [req-5187a3ec-eb45-48bb-81b7-082aa6367e8c req-44f5a60a-d16c-47f1-ba65-b6b641fe9349 service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Refreshing instance network info cache due to event network-changed-b8a895f7-ad9d-4d49-8460-de82459d88f7. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 850.881967] env[61629]: DEBUG oslo_concurrency.lockutils [req-5187a3ec-eb45-48bb-81b7-082aa6367e8c req-44f5a60a-d16c-47f1-ba65-b6b641fe9349 service nova] Acquiring lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.882156] env[61629]: DEBUG oslo_concurrency.lockutils [req-5187a3ec-eb45-48bb-81b7-082aa6367e8c req-44f5a60a-d16c-47f1-ba65-b6b641fe9349 service nova] Acquired lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.884174] env[61629]: DEBUG nova.network.neutron [req-5187a3ec-eb45-48bb-81b7-082aa6367e8c req-44f5a60a-d16c-47f1-ba65-b6b641fe9349 service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Refreshing network info cache for port b8a895f7-ad9d-4d49-8460-de82459d88f7 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 850.928019] env[61629]: DEBUG nova.compute.manager [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 851.199147] env[61629]: DEBUG nova.network.neutron [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Successfully created port: 7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 851.313404] env[61629]: DEBUG nova.compute.manager [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 851.350126] env[61629]: DEBUG nova.network.neutron [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 851.446755] env[61629]: DEBUG oslo_concurrency.lockutils [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.501517] env[61629]: DEBUG nova.network.neutron [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Updating instance_info_cache with network_info: [{"id": "be65c78b-11aa-43c2-aad9-f7f19a2b47c1", "address": "fa:16:3e:d7:89:40", "network": {"id": "b78b4b2d-27f5-44cd-970e-f9162da4173c", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-836906968-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5810b150c2146a58b8ceb44592a03e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c47e98ff-83cf-48d2-bf91-2931c7386b6a", "external-id": "nsx-vlan-transportzone-992", "segmentation_id": 992, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe65c78b-11", "ovs_interfaceid": "be65c78b-11aa-43c2-aad9-f7f19a2b47c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.624856] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a57c16e-1739-4849-b0c1-1273a64e3b3f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.640256] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ca2dae0-728e-4fca-878e-3a8d14ae1856 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.640256] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Acquiring lock "edb4e0f6-57ad-48cf-aa20-3b2549bff3fe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.640630] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Lock "edb4e0f6-57ad-48cf-aa20-3b2549bff3fe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61629) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.641517] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Acquiring lock "edb4e0f6-57ad-48cf-aa20-3b2549bff3fe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.641839] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Lock "edb4e0f6-57ad-48cf-aa20-3b2549bff3fe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.642198] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Lock "edb4e0f6-57ad-48cf-aa20-3b2549bff3fe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.645445] env[61629]: INFO nova.compute.manager [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Terminating instance [ 851.647895] env[61629]: DEBUG nova.compute.manager [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 851.648227] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 851.649112] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12569b60-242c-42d2-958d-419559db48db {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.683307] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde9792d-c310-4ed9-9ed4-cfcb265c64fd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.691410] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 851.691410] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c973e0f0-81c4-4936-af09-f69425d3e32f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.694511] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c0b16e-fef4-4d00-9aba-a92799ec2f00 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.700219] env[61629]: DEBUG oslo_vmware.api [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Waiting for the task: (returnval){ [ 851.700219] env[61629]: value = "task-1354096" [ 851.700219] env[61629]: _type = "Task" [ 851.700219] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.712058] env[61629]: DEBUG nova.compute.provider_tree [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 851.720452] env[61629]: DEBUG oslo_vmware.api [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Task: {'id': task-1354096, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.768409] env[61629]: DEBUG nova.network.neutron [req-5187a3ec-eb45-48bb-81b7-082aa6367e8c req-44f5a60a-d16c-47f1-ba65-b6b641fe9349 service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Updated VIF entry in instance network info cache for port b8a895f7-ad9d-4d49-8460-de82459d88f7. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 851.768822] env[61629]: DEBUG nova.network.neutron [req-5187a3ec-eb45-48bb-81b7-082aa6367e8c req-44f5a60a-d16c-47f1-ba65-b6b641fe9349 service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Updating instance_info_cache with network_info: [{"id": "b8a895f7-ad9d-4d49-8460-de82459d88f7", "address": "fa:16:3e:7d:7e:9a", "network": {"id": "7ab21805-1836-4ac0-94d2-d715f9f3352e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1256584900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc5fe81fb0eb4820825cc8e97b8fe4f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8a895f7-ad", "ovs_interfaceid": "b8a895f7-ad9d-4d49-8460-de82459d88f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.016348] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Releasing lock "refresh_cache-c3f830d6-8999-49d5-a431-b09dfdaf8313" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.016764] env[61629]: DEBUG nova.compute.manager [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Instance network_info: |[{"id": "be65c78b-11aa-43c2-aad9-f7f19a2b47c1", "address": "fa:16:3e:d7:89:40", "network": {"id": "b78b4b2d-27f5-44cd-970e-f9162da4173c", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-836906968-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5810b150c2146a58b8ceb44592a03e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c47e98ff-83cf-48d2-bf91-2931c7386b6a", "external-id": "nsx-vlan-transportzone-992", "segmentation_id": 992, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe65c78b-11", "ovs_interfaceid": "be65c78b-11aa-43c2-aad9-f7f19a2b47c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 852.017226] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:89:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c47e98ff-83cf-48d2-bf91-2931c7386b6a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'be65c78b-11aa-43c2-aad9-f7f19a2b47c1', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 852.025505] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Creating folder: Project (a5810b150c2146a58b8ceb44592a03e9). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 852.025809] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d83d6ed-82d9-4baa-983c-324417494640 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.039743] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Created folder: Project (a5810b150c2146a58b8ceb44592a03e9) in parent group-v288443. [ 852.039947] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Creating folder: Instances. Parent ref: group-v288481. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 852.040206] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47a38450-78b7-4a39-b8ca-53b50713a3f0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.049063] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Created folder: Instances in parent group-v288481. [ 852.049316] env[61629]: DEBUG oslo.service.loopingcall [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 852.049515] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 852.049720] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7cd7067b-59e7-4dad-a5ec-8bfd68356cf9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.068876] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 852.068876] env[61629]: value = "task-1354099" [ 852.068876] env[61629]: _type = "Task" [ 852.068876] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.077054] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354099, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.209996] env[61629]: DEBUG oslo_vmware.api [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Task: {'id': task-1354096, 'name': PowerOffVM_Task, 'duration_secs': 0.179874} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.210791] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 852.210990] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 852.211580] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c5972402-c012-4bea-8802-93849844a92d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.215435] env[61629]: DEBUG nova.scheduler.client.report [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 852.248234] env[61629]: DEBUG nova.compute.manager [req-863ac754-7dd2-4f1c-a2a2-7ca24b594073 req-76341225-1fb0-4150-97cb-7c09e6c1ac2e service nova] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Received event 
network-changed-be65c78b-11aa-43c2-aad9-f7f19a2b47c1 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 852.248586] env[61629]: DEBUG nova.compute.manager [req-863ac754-7dd2-4f1c-a2a2-7ca24b594073 req-76341225-1fb0-4150-97cb-7c09e6c1ac2e service nova] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Refreshing instance network info cache due to event network-changed-be65c78b-11aa-43c2-aad9-f7f19a2b47c1. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 852.248837] env[61629]: DEBUG oslo_concurrency.lockutils [req-863ac754-7dd2-4f1c-a2a2-7ca24b594073 req-76341225-1fb0-4150-97cb-7c09e6c1ac2e service nova] Acquiring lock "refresh_cache-c3f830d6-8999-49d5-a431-b09dfdaf8313" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.249184] env[61629]: DEBUG oslo_concurrency.lockutils [req-863ac754-7dd2-4f1c-a2a2-7ca24b594073 req-76341225-1fb0-4150-97cb-7c09e6c1ac2e service nova] Acquired lock "refresh_cache-c3f830d6-8999-49d5-a431-b09dfdaf8313" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.249968] env[61629]: DEBUG nova.network.neutron [req-863ac754-7dd2-4f1c-a2a2-7ca24b594073 req-76341225-1fb0-4150-97cb-7c09e6c1ac2e service nova] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Refreshing network info cache for port be65c78b-11aa-43c2-aad9-f7f19a2b47c1 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 852.272049] env[61629]: DEBUG oslo_concurrency.lockutils [req-5187a3ec-eb45-48bb-81b7-082aa6367e8c req-44f5a60a-d16c-47f1-ba65-b6b641fe9349 service nova] Releasing lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.272397] env[61629]: DEBUG nova.compute.manager [req-5187a3ec-eb45-48bb-81b7-082aa6367e8c req-44f5a60a-d16c-47f1-ba65-b6b641fe9349 service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Received event network-vif-deleted-bf10a052-1802-41aa-9428-2bd89d22456d {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 852.272664] env[61629]: DEBUG nova.compute.manager [req-5187a3ec-eb45-48bb-81b7-082aa6367e8c req-44f5a60a-d16c-47f1-ba65-b6b641fe9349 service nova] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Received event network-vif-deleted-ebc79844-43d9-4ea4-b9f1-aa5a92e010e5 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 852.287137] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 852.287479] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 852.287765] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 
tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Deleting the datastore file [datastore2] edb4e0f6-57ad-48cf-aa20-3b2549bff3fe {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 852.288325] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-70aa0a2a-f13f-41de-b645-0828fc391770 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.296424] env[61629]: DEBUG oslo_vmware.api [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Waiting for the task: (returnval){ [ 852.296424] env[61629]: value = "task-1354101" [ 852.296424] env[61629]: _type = "Task" [ 852.296424] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.304094] env[61629]: DEBUG oslo_vmware.api [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Task: {'id': task-1354101, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.321857] env[61629]: DEBUG nova.compute.manager [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 852.349663] env[61629]: DEBUG nova.virt.hardware [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 852.349792] env[61629]: DEBUG nova.virt.hardware [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 852.349946] env[61629]: DEBUG nova.virt.hardware [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 852.350141] env[61629]: DEBUG nova.virt.hardware [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 
tempest-AttachVolumeTestJSON-41894217-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 852.350285] env[61629]: DEBUG nova.virt.hardware [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 852.350428] env[61629]: DEBUG nova.virt.hardware [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 852.350639] env[61629]: DEBUG nova.virt.hardware [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 852.350794] env[61629]: DEBUG nova.virt.hardware [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 852.350957] env[61629]: DEBUG nova.virt.hardware [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 852.351133] env[61629]: DEBUG nova.virt.hardware [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 852.351304] env[61629]: DEBUG nova.virt.hardware [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 852.352175] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb06c85-41cf-4b10-9029-41ea63353f70 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.359972] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89eb9269-c354-41e2-91ca-1521247d2bd1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.578779] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354099, 'name': CreateVM_Task, 'duration_secs': 0.282749} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.579035] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 852.579777] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.580014] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.580386] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 852.580681] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60cd57b6-1a3c-4cf3-8452-37e0954c8da8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.585303] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Waiting for the task: (returnval){ [ 852.585303] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5212e547-e50a-303f-58cf-44bd62768c9a" [ 852.585303] env[61629]: _type = "Task" [ 852.585303] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.592659] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5212e547-e50a-303f-58cf-44bd62768c9a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.720963] env[61629]: DEBUG oslo_concurrency.lockutils [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.424s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.721541] env[61629]: DEBUG nova.compute.manager [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 852.724289] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.764s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.725948] env[61629]: INFO nova.compute.claims [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 852.808084] env[61629]: DEBUG oslo_vmware.api [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Task: {'id': task-1354101, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161443} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.808084] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 852.808084] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 852.808084] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 852.808084] env[61629]: INFO nova.compute.manager [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Took 1.16 seconds to destroy the instance on the hypervisor. [ 852.808377] env[61629]: DEBUG oslo.service.loopingcall [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 852.810351] env[61629]: DEBUG nova.compute.manager [-] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 852.810452] env[61629]: DEBUG nova.network.neutron [-] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 853.097415] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5212e547-e50a-303f-58cf-44bd62768c9a, 'name': SearchDatastore_Task, 'duration_secs': 0.008712} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.097761] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.097992] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 853.098264] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.098411] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.098668] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 853.098954] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd976483-c2ca-4d07-8a3e-e7c8a32d9538 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.103573] env[61629]: DEBUG nova.compute.manager [req-f7cc5472-7935-4cb0-8dd9-4d3c42161827 req-ac1a65f2-2435-4ca4-85c6-de4e4d849cbc service nova] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Received event 
network-vif-deleted-a7d2b513-9c8b-4100-babd-1362e7f4f251 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 853.103761] env[61629]: INFO nova.compute.manager [req-f7cc5472-7935-4cb0-8dd9-4d3c42161827 req-ac1a65f2-2435-4ca4-85c6-de4e4d849cbc service nova] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Neutron deleted interface a7d2b513-9c8b-4100-babd-1362e7f4f251; detaching it from the instance and deleting it from the info cache [ 853.103931] env[61629]: DEBUG nova.network.neutron [req-f7cc5472-7935-4cb0-8dd9-4d3c42161827 req-ac1a65f2-2435-4ca4-85c6-de4e4d849cbc service nova] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.111931] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 853.112355] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 853.113755] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c117ac75-cec7-4441-8760-0735d40987ff {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.119636] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Waiting for the task: (returnval){ [ 853.119636] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52f5579b-d8ff-a119-c0db-d5865a12acb6" [ 853.119636] env[61629]: _type = "Task" [ 853.119636] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.128038] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52f5579b-d8ff-a119-c0db-d5865a12acb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.230543] env[61629]: DEBUG nova.compute.utils [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 853.236918] env[61629]: DEBUG nova.compute.manager [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 853.236918] env[61629]: DEBUG nova.network.neutron [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 853.300458] env[61629]: DEBUG nova.network.neutron [req-863ac754-7dd2-4f1c-a2a2-7ca24b594073 req-76341225-1fb0-4150-97cb-7c09e6c1ac2e service nova] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Updated VIF entry in instance network info cache for port be65c78b-11aa-43c2-aad9-f7f19a2b47c1. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 853.300987] env[61629]: DEBUG nova.network.neutron [req-863ac754-7dd2-4f1c-a2a2-7ca24b594073 req-76341225-1fb0-4150-97cb-7c09e6c1ac2e service nova] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Updating instance_info_cache with network_info: [{"id": "be65c78b-11aa-43c2-aad9-f7f19a2b47c1", "address": "fa:16:3e:d7:89:40", "network": {"id": "b78b4b2d-27f5-44cd-970e-f9162da4173c", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-836906968-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5810b150c2146a58b8ceb44592a03e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c47e98ff-83cf-48d2-bf91-2931c7386b6a", "external-id": "nsx-vlan-transportzone-992", "segmentation_id": 992, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe65c78b-11", "ovs_interfaceid": "be65c78b-11aa-43c2-aad9-f7f19a2b47c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.305499] env[61629]: DEBUG nova.policy [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2da5bc6775a24e2bbab129d727de895f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '27cc5f4c983a4a40aca3f207a6fed658', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 853.307591] env[61629]: DEBUG nova.network.neutron [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Successfully updated port: 7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 853.545135] env[61629]: DEBUG nova.network.neutron [-] [instance: 
edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.582433] env[61629]: DEBUG nova.network.neutron [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Successfully created port: 26b1c08a-ffa0-488a-ae0b-482ca395c8ad {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 853.606714] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6cfa3480-9abf-4067-bd74-d96a2681c892 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.617857] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f6318e-e932-44c1-b2d7-3b82df871f94 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.644537] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52f5579b-d8ff-a119-c0db-d5865a12acb6, 'name': SearchDatastore_Task, 'duration_secs': 0.011075} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.654031] env[61629]: DEBUG nova.compute.manager [req-f7cc5472-7935-4cb0-8dd9-4d3c42161827 req-ac1a65f2-2435-4ca4-85c6-de4e4d849cbc service nova] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Detach interface failed, port_id=a7d2b513-9c8b-4100-babd-1362e7f4f251, reason: Instance edb4e0f6-57ad-48cf-aa20-3b2549bff3fe could not be found. {{(pid=61629) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 853.654718] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d5b936d-4c16-45f0-b17a-0c5c333d261b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.661966] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Waiting for the task: (returnval){ [ 853.661966] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52cca22e-9082-23ab-56a4-06d47ea52195" [ 853.661966] env[61629]: _type = "Task" [ 853.661966] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.670524] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52cca22e-9082-23ab-56a4-06d47ea52195, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.739468] env[61629]: DEBUG nova.compute.manager [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 853.804646] env[61629]: DEBUG oslo_concurrency.lockutils [req-863ac754-7dd2-4f1c-a2a2-7ca24b594073 req-76341225-1fb0-4150-97cb-7c09e6c1ac2e service nova] Releasing lock "refresh_cache-c3f830d6-8999-49d5-a431-b09dfdaf8313" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.810259] env[61629]: DEBUG oslo_concurrency.lockutils [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquiring lock "refresh_cache-2b01eeae-64be-44b3-b4cf-c2a8490043e3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.810440] env[61629]: DEBUG oslo_concurrency.lockutils [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquired lock "refresh_cache-2b01eeae-64be-44b3-b4cf-c2a8490043e3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.810541] env[61629]: DEBUG nova.network.neutron [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 853.997156] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ffe926-2269-4cbe-ab16-5977f36cf42d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.005079] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3ef906-0d2f-40b8-8a6c-b5a6c0688668 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.033994] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b050d8f-09a3-4fe4-b59a-80d38d10cf5e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.041624] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e50a9d-f56f-4e86-8aa1-98e35e9bb4ff {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.056064] env[61629]: INFO nova.compute.manager [-] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Took 1.25 seconds to deallocate network for instance. 
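The inventory payload logged here for provider d075eff1-6f77-44a8-824e-16f3e03b4063 is what the report client syncs to Placement, and it maps to schedulable capacity via the usual (total - reserved) * allocation_ratio rule per resource class. A minimal illustrative sketch, restating only the values visible in this log (the dict literal and printing are assumptions for illustration, not code from this run):

# Hedged illustration: capacity implied by the inventory record in the log above.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    # Usable capacity per resource class: (total - reserved) * allocation_ratio.
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: capacity={capacity:g}")

# Prints: VCPU: capacity=192, MEMORY_MB: capacity=196078, DISK_GB: capacity=400,
# which is consistent with the instance claims in this run continuing to succeed
# against the same provider.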
[ 854.056631] env[61629]: DEBUG nova.compute.provider_tree [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.172287] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52cca22e-9082-23ab-56a4-06d47ea52195, 'name': SearchDatastore_Task, 'duration_secs': 0.00917} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.172563] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.172818] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] c3f830d6-8999-49d5-a431-b09dfdaf8313/c3f830d6-8999-49d5-a431-b09dfdaf8313.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 854.173087] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-332b6035-9857-42b0-915c-5875fa9299e2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.179841] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Waiting for the task: (returnval){ [ 854.179841] env[61629]: value = "task-1354102" [ 854.179841] env[61629]: _type = "Task" [ 854.179841] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.187351] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Task: {'id': task-1354102, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.273346] env[61629]: DEBUG nova.compute.manager [req-62453c16-cc54-47e5-8cd0-fb60b3831891 req-278a6a50-554d-42f3-9692-2f00b3b165b8 service nova] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Received event network-vif-plugged-7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 854.273670] env[61629]: DEBUG oslo_concurrency.lockutils [req-62453c16-cc54-47e5-8cd0-fb60b3831891 req-278a6a50-554d-42f3-9692-2f00b3b165b8 service nova] Acquiring lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.273866] env[61629]: DEBUG oslo_concurrency.lockutils [req-62453c16-cc54-47e5-8cd0-fb60b3831891 req-278a6a50-554d-42f3-9692-2f00b3b165b8 service nova] Lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.274035] env[61629]: DEBUG oslo_concurrency.lockutils [req-62453c16-cc54-47e5-8cd0-fb60b3831891 req-278a6a50-554d-42f3-9692-2f00b3b165b8 service nova] Lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.274209] env[61629]: DEBUG nova.compute.manager [req-62453c16-cc54-47e5-8cd0-fb60b3831891 req-278a6a50-554d-42f3-9692-2f00b3b165b8 service nova] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] No waiting events found dispatching network-vif-plugged-7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 854.274373] env[61629]: WARNING nova.compute.manager [req-62453c16-cc54-47e5-8cd0-fb60b3831891 req-278a6a50-554d-42f3-9692-2f00b3b165b8 service nova] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Received unexpected event network-vif-plugged-7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3 for instance with vm_state building and task_state spawning. [ 854.274592] env[61629]: DEBUG nova.compute.manager [req-62453c16-cc54-47e5-8cd0-fb60b3831891 req-278a6a50-554d-42f3-9692-2f00b3b165b8 service nova] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Received event network-changed-7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 854.274676] env[61629]: DEBUG nova.compute.manager [req-62453c16-cc54-47e5-8cd0-fb60b3831891 req-278a6a50-554d-42f3-9692-2f00b3b165b8 service nova] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Refreshing instance network info cache due to event network-changed-7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 854.274835] env[61629]: DEBUG oslo_concurrency.lockutils [req-62453c16-cc54-47e5-8cd0-fb60b3831891 req-278a6a50-554d-42f3-9692-2f00b3b165b8 service nova] Acquiring lock "refresh_cache-2b01eeae-64be-44b3-b4cf-c2a8490043e3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.356906] env[61629]: DEBUG nova.network.neutron [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 854.523691] env[61629]: DEBUG nova.network.neutron [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Updating instance_info_cache with network_info: [{"id": "7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3", "address": "fa:16:3e:c9:0f:8e", "network": {"id": "3cf05e01-9439-42dd-803f-fa703eb96988", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1374890316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b0b101e81dfe4c8b98314be278282c0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ee83b47-4c", "ovs_interfaceid": "7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.560961] env[61629]: DEBUG nova.scheduler.client.report [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 854.566128] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.691278] 
env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Task: {'id': task-1354102, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.750928] env[61629]: DEBUG nova.compute.manager [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 854.781453] env[61629]: DEBUG nova.virt.hardware [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 854.781732] env[61629]: DEBUG nova.virt.hardware [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 854.781896] env[61629]: DEBUG nova.virt.hardware [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 854.782087] env[61629]: DEBUG nova.virt.hardware [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 854.782236] env[61629]: DEBUG nova.virt.hardware [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 854.782403] env[61629]: DEBUG nova.virt.hardware [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 854.782634] env[61629]: DEBUG nova.virt.hardware [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 
tempest-SecurityGroupsTestJSON-646677483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 854.782796] env[61629]: DEBUG nova.virt.hardware [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 854.782974] env[61629]: DEBUG nova.virt.hardware [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 854.783162] env[61629]: DEBUG nova.virt.hardware [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 854.783423] env[61629]: DEBUG nova.virt.hardware [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 854.784629] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a790f762-549d-4e2e-bdfb-bf2b55718b4b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.793771] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-727b7f1e-aacb-4475-be51-06475774242b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.030552] env[61629]: DEBUG oslo_concurrency.lockutils [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Releasing lock "refresh_cache-2b01eeae-64be-44b3-b4cf-c2a8490043e3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.030900] env[61629]: DEBUG nova.compute.manager [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Instance network_info: |[{"id": "7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3", "address": "fa:16:3e:c9:0f:8e", "network": {"id": "3cf05e01-9439-42dd-803f-fa703eb96988", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1374890316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b0b101e81dfe4c8b98314be278282c0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ee83b47-4c", "ovs_interfaceid": "7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 855.031315] env[61629]: DEBUG oslo_concurrency.lockutils [req-62453c16-cc54-47e5-8cd0-fb60b3831891 req-278a6a50-554d-42f3-9692-2f00b3b165b8 service nova] Acquired lock "refresh_cache-2b01eeae-64be-44b3-b4cf-c2a8490043e3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.031567] env[61629]: DEBUG nova.network.neutron [req-62453c16-cc54-47e5-8cd0-fb60b3831891 req-278a6a50-554d-42f3-9692-2f00b3b165b8 service nova] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Refreshing network info cache for port 7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 855.032847] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:0f:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9aa05ef8-c7bb-4af5-983f-bfa0f3f88223', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 855.042610] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Creating folder: Project (b0b101e81dfe4c8b98314be278282c0d). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 855.044219] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3bf2a66e-e7b3-40e9-bf5d-f40d391e6eea {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.058106] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Created folder: Project (b0b101e81dfe4c8b98314be278282c0d) in parent group-v288443. [ 855.058479] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Creating folder: Instances. Parent ref: group-v288484. 
{{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 855.058846] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4b162093-6e8e-455a-a7dd-11745823da06 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.067967] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.343s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.068822] env[61629]: DEBUG nova.compute.manager [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 855.074709] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.939s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.077208] env[61629]: INFO nova.compute.claims [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 855.081528] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Created folder: Instances in parent group-v288484. [ 855.081911] env[61629]: DEBUG oslo.service.loopingcall [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 855.083058] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 855.083406] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0325f973-f53a-462b-b48a-7f5082b6b061 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.116497] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 855.116497] env[61629]: value = "task-1354105" [ 855.116497] env[61629]: _type = "Task" [ 855.116497] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.127324] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354105, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.156102] env[61629]: DEBUG nova.compute.manager [req-bce1c5d6-a430-4430-b5cc-96e9409298f4 req-7e118f59-300b-451c-a49f-26cbe5f51e77 service nova] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Received event network-vif-plugged-26b1c08a-ffa0-488a-ae0b-482ca395c8ad {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 855.156467] env[61629]: DEBUG oslo_concurrency.lockutils [req-bce1c5d6-a430-4430-b5cc-96e9409298f4 req-7e118f59-300b-451c-a49f-26cbe5f51e77 service nova] Acquiring lock "68c1e93a-2829-4764-a900-75c3479b4715-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.156941] env[61629]: DEBUG oslo_concurrency.lockutils [req-bce1c5d6-a430-4430-b5cc-96e9409298f4 req-7e118f59-300b-451c-a49f-26cbe5f51e77 service nova] Lock "68c1e93a-2829-4764-a900-75c3479b4715-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.158377] env[61629]: DEBUG oslo_concurrency.lockutils [req-bce1c5d6-a430-4430-b5cc-96e9409298f4 req-7e118f59-300b-451c-a49f-26cbe5f51e77 service nova] Lock "68c1e93a-2829-4764-a900-75c3479b4715-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.158377] env[61629]: DEBUG nova.compute.manager [req-bce1c5d6-a430-4430-b5cc-96e9409298f4 req-7e118f59-300b-451c-a49f-26cbe5f51e77 service nova] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] No waiting events found dispatching network-vif-plugged-26b1c08a-ffa0-488a-ae0b-482ca395c8ad {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 855.158377] env[61629]: WARNING nova.compute.manager [req-bce1c5d6-a430-4430-b5cc-96e9409298f4 req-7e118f59-300b-451c-a49f-26cbe5f51e77 service nova] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Received unexpected event network-vif-plugged-26b1c08a-ffa0-488a-ae0b-482ca395c8ad for instance with vm_state building and task_state spawning. [ 855.188331] env[61629]: DEBUG nova.network.neutron [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Successfully updated port: 26b1c08a-ffa0-488a-ae0b-482ca395c8ad {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 855.195551] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Task: {'id': task-1354102, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514179} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.195551] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] c3f830d6-8999-49d5-a431-b09dfdaf8313/c3f830d6-8999-49d5-a431-b09dfdaf8313.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 855.195551] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 855.195721] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a44530ea-f407-44e5-8393-beb140909d2c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.203538] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Waiting for the task: (returnval){ [ 855.203538] env[61629]: value = "task-1354106" [ 855.203538] env[61629]: _type = "Task" [ 855.203538] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.212704] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Task: {'id': task-1354106, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.584595] env[61629]: DEBUG nova.compute.utils [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 855.588265] env[61629]: DEBUG nova.compute.manager [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 855.588373] env[61629]: DEBUG nova.network.neutron [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 855.627341] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354105, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.643521] env[61629]: DEBUG nova.policy [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ff5dd6ffbf5452e8f56a1f64ae175b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67e38fd8e30349c6857025719fd26211', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 855.692766] env[61629]: DEBUG oslo_concurrency.lockutils [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Acquiring lock "refresh_cache-68c1e93a-2829-4764-a900-75c3479b4715" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.693068] env[61629]: DEBUG oslo_concurrency.lockutils [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Acquired lock "refresh_cache-68c1e93a-2829-4764-a900-75c3479b4715" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.693815] env[61629]: DEBUG nova.network.neutron [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 855.713410] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Task: {'id': task-1354106, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.132454} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.713681] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 855.714446] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc320d8e-892d-4641-8ddc-c3efd3ea7738 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.737404] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] c3f830d6-8999-49d5-a431-b09dfdaf8313/c3f830d6-8999-49d5-a431-b09dfdaf8313.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 855.740180] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6836621-bc08-4835-b93b-8c219e33b354 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.760421] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Waiting for the task: (returnval){ [ 855.760421] env[61629]: value = "task-1354107" [ 855.760421] env[61629]: _type = "Task" [ 855.760421] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.769857] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Task: {'id': task-1354107, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.788534] env[61629]: DEBUG nova.network.neutron [req-62453c16-cc54-47e5-8cd0-fb60b3831891 req-278a6a50-554d-42f3-9692-2f00b3b165b8 service nova] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Updated VIF entry in instance network info cache for port 7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 855.788997] env[61629]: DEBUG nova.network.neutron [req-62453c16-cc54-47e5-8cd0-fb60b3831891 req-278a6a50-554d-42f3-9692-2f00b3b165b8 service nova] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Updating instance_info_cache with network_info: [{"id": "7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3", "address": "fa:16:3e:c9:0f:8e", "network": {"id": "3cf05e01-9439-42dd-803f-fa703eb96988", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1374890316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b0b101e81dfe4c8b98314be278282c0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ee83b47-4c", "ovs_interfaceid": "7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.993556] env[61629]: DEBUG nova.network.neutron [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Successfully created port: 51940ecf-0cf7-40a7-ad25-0aab2c24a535 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 856.089039] env[61629]: DEBUG nova.compute.manager [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 856.128473] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354105, 'name': CreateVM_Task, 'duration_secs': 0.807882} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.130847] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 856.132231] env[61629]: DEBUG oslo_concurrency.lockutils [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.132392] env[61629]: DEBUG oslo_concurrency.lockutils [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.132714] env[61629]: DEBUG oslo_concurrency.lockutils [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 856.133210] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21057ee8-3762-445e-bd28-3b8b30151a0b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.137755] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Waiting for the task: (returnval){ [ 856.137755] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52bc5293-de86-b020-66f6-8f2467a3c415" [ 856.137755] env[61629]: _type = "Task" [ 856.137755] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.148097] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52bc5293-de86-b020-66f6-8f2467a3c415, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.236396] env[61629]: DEBUG nova.network.neutron [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 856.272593] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Task: {'id': task-1354107, 'name': ReconfigVM_Task, 'duration_secs': 0.28881} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.273520] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Reconfigured VM instance instance-00000042 to attach disk [datastore2] c3f830d6-8999-49d5-a431-b09dfdaf8313/c3f830d6-8999-49d5-a431-b09dfdaf8313.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 856.273576] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a9626cc2-cb0f-4b8e-aebc-a03ce7ba7052 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.282545] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Waiting for the task: (returnval){ [ 856.282545] env[61629]: value = "task-1354108" [ 856.282545] env[61629]: _type = "Task" [ 856.282545] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.293181] env[61629]: DEBUG oslo_concurrency.lockutils [req-62453c16-cc54-47e5-8cd0-fb60b3831891 req-278a6a50-554d-42f3-9692-2f00b3b165b8 service nova] Releasing lock "refresh_cache-2b01eeae-64be-44b3-b4cf-c2a8490043e3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.293711] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Task: {'id': task-1354108, 'name': Rename_Task} progress is 6%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.383915] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb5248e-2ed4-4161-824b-11e8d456380a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.392239] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f37ef43-401a-4794-a978-4b862c32540d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.396228] env[61629]: DEBUG nova.network.neutron [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Updating instance_info_cache with network_info: [{"id": "26b1c08a-ffa0-488a-ae0b-482ca395c8ad", "address": "fa:16:3e:17:c6:e3", "network": {"id": "a7162f2a-5965-4793-b504-070d397c4652", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-205722911-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27cc5f4c983a4a40aca3f207a6fed658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26b1c08a-ff", "ovs_interfaceid": "26b1c08a-ffa0-488a-ae0b-482ca395c8ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.425598] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667cf59b-f366-4fb3-8272-d49b5a76b0e7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.433477] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa8faec-3e4b-41a6-8a17-db0c8c128403 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.446995] env[61629]: DEBUG nova.compute.provider_tree [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 856.651350] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52bc5293-de86-b020-66f6-8f2467a3c415, 'name': SearchDatastore_Task, 'duration_secs': 0.008837} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.651696] env[61629]: DEBUG oslo_concurrency.lockutils [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.651940] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 856.652220] env[61629]: DEBUG oslo_concurrency.lockutils [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.652369] env[61629]: DEBUG oslo_concurrency.lockutils [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.652544] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 856.652795] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55d5ff75-1068-437a-b8ba-d18253db0b5a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.660219] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 856.660506] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 856.661489] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97030128-f82f-49b2-bdd4-095f9bd5bcee {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.667056] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Waiting for the task: (returnval){ [ 856.667056] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]524f9094-fd16-a90e-5d0c-eaeaad0df3c5" [ 856.667056] env[61629]: _type = "Task" [ 856.667056] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.674232] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]524f9094-fd16-a90e-5d0c-eaeaad0df3c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.793712] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Task: {'id': task-1354108, 'name': Rename_Task, 'duration_secs': 0.132672} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.794052] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 856.794314] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4dc7521f-d3f7-465b-9e25-568190fc36c4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.800777] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Waiting for the task: (returnval){ [ 856.800777] env[61629]: value = "task-1354109" [ 856.800777] env[61629]: _type = "Task" [ 856.800777] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.808130] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Task: {'id': task-1354109, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.898451] env[61629]: DEBUG oslo_concurrency.lockutils [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Releasing lock "refresh_cache-68c1e93a-2829-4764-a900-75c3479b4715" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.898898] env[61629]: DEBUG nova.compute.manager [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Instance network_info: |[{"id": "26b1c08a-ffa0-488a-ae0b-482ca395c8ad", "address": "fa:16:3e:17:c6:e3", "network": {"id": "a7162f2a-5965-4793-b504-070d397c4652", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-205722911-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27cc5f4c983a4a40aca3f207a6fed658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26b1c08a-ff", "ovs_interfaceid": "26b1c08a-ffa0-488a-ae0b-482ca395c8ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 856.899495] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:c6:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eed34ae1-5f7f-4deb-9db8-85eaa1e60c29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '26b1c08a-ffa0-488a-ae0b-482ca395c8ad', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 856.907811] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Creating folder: Project (27cc5f4c983a4a40aca3f207a6fed658). Parent ref: group-v288443. 
{{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 856.908135] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e289eb6-ada3-4470-9a71-debac0cfcf4a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.919424] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Created folder: Project (27cc5f4c983a4a40aca3f207a6fed658) in parent group-v288443. [ 856.919697] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Creating folder: Instances. Parent ref: group-v288487. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 856.919986] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a50b8bad-8bf3-409b-b155-99b673811a4d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.929858] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Created folder: Instances in parent group-v288487. [ 856.930121] env[61629]: DEBUG oslo.service.loopingcall [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 856.930327] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 856.930536] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-43fdd152-94bf-4966-b21a-cccddc8e33d7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.949816] env[61629]: DEBUG nova.scheduler.client.report [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 856.954241] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 856.954241] env[61629]: value = "task-1354112" [ 856.954241] env[61629]: _type = "Task" [ 856.954241] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.962627] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354112, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.105258] env[61629]: DEBUG nova.compute.manager [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 857.131314] env[61629]: DEBUG nova.virt.hardware [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 857.131653] env[61629]: DEBUG nova.virt.hardware [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 857.131863] env[61629]: DEBUG nova.virt.hardware [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 857.132137] env[61629]: DEBUG nova.virt.hardware [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 857.132318] env[61629]: DEBUG nova.virt.hardware [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 857.132490] env[61629]: DEBUG nova.virt.hardware [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 857.132735] env[61629]: DEBUG nova.virt.hardware [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 857.132937] env[61629]: DEBUG nova.virt.hardware [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 857.133159] env[61629]: DEBUG nova.virt.hardware [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 857.133367] env[61629]: DEBUG nova.virt.hardware [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 857.133587] env[61629]: DEBUG nova.virt.hardware [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 857.134454] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7951697c-7044-4015-b83e-2e14929faeb4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.142151] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-205c939b-4a25-4acb-8386-5a225c7dd333 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.177809] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]524f9094-fd16-a90e-5d0c-eaeaad0df3c5, 'name': SearchDatastore_Task, 'duration_secs': 0.00898} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.178754] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7a1e260-95f1-4506-96bb-0e6af0356a62 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.183105] env[61629]: DEBUG nova.compute.manager [req-576185f4-1ca5-4623-bc10-a5a0d8137b71 req-66e0a441-a96c-4a53-a9eb-d580c543c63c service nova] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Received event network-changed-26b1c08a-ffa0-488a-ae0b-482ca395c8ad {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 857.183260] env[61629]: DEBUG nova.compute.manager [req-576185f4-1ca5-4623-bc10-a5a0d8137b71 req-66e0a441-a96c-4a53-a9eb-d580c543c63c service nova] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Refreshing instance network info cache due to event network-changed-26b1c08a-ffa0-488a-ae0b-482ca395c8ad. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 857.183496] env[61629]: DEBUG oslo_concurrency.lockutils [req-576185f4-1ca5-4623-bc10-a5a0d8137b71 req-66e0a441-a96c-4a53-a9eb-d580c543c63c service nova] Acquiring lock "refresh_cache-68c1e93a-2829-4764-a900-75c3479b4715" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.183641] env[61629]: DEBUG oslo_concurrency.lockutils [req-576185f4-1ca5-4623-bc10-a5a0d8137b71 req-66e0a441-a96c-4a53-a9eb-d580c543c63c service nova] Acquired lock "refresh_cache-68c1e93a-2829-4764-a900-75c3479b4715" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.183801] env[61629]: DEBUG nova.network.neutron [req-576185f4-1ca5-4623-bc10-a5a0d8137b71 req-66e0a441-a96c-4a53-a9eb-d580c543c63c service nova] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Refreshing network info cache for port 26b1c08a-ffa0-488a-ae0b-482ca395c8ad {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 857.188717] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Waiting for the task: (returnval){ [ 857.188717] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]522c23f0-fb13-bf55-9b76-12069232513a" [ 857.188717] env[61629]: _type = "Task" [ 857.188717] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.198014] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]522c23f0-fb13-bf55-9b76-12069232513a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.311735] env[61629]: DEBUG oslo_vmware.api [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Task: {'id': task-1354109, 'name': PowerOnVM_Task, 'duration_secs': 0.446286} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.312715] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 857.312715] env[61629]: INFO nova.compute.manager [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Took 7.39 seconds to spawn the instance on the hypervisor. 
[ 857.312898] env[61629]: DEBUG nova.compute.manager [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 857.313703] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da7bc35-442d-4400-ab9c-fe6ea2e67f4c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.455841] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.381s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.456743] env[61629]: DEBUG nova.compute.manager [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 857.459732] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.999s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.461242] env[61629]: INFO nova.compute.claims [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 857.474870] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354112, 'name': CreateVM_Task, 'duration_secs': 0.342633} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.475150] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 857.475957] env[61629]: DEBUG oslo_concurrency.lockutils [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.476219] env[61629]: DEBUG oslo_concurrency.lockutils [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.476835] env[61629]: DEBUG oslo_concurrency.lockutils [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 857.477162] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6da5964-799c-402a-ab59-0b8b535801b2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.482200] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Waiting for the task: (returnval){ [ 857.482200] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5232d8fb-a66d-c29f-7d3b-0d783c72de09" [ 857.482200] env[61629]: _type = "Task" [ 857.482200] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.491699] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5232d8fb-a66d-c29f-7d3b-0d783c72de09, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.543510] env[61629]: DEBUG nova.compute.manager [req-e222d5aa-8be6-4ff4-8b64-1169c1724937 req-dd6ef434-943f-45e6-a8aa-3071179bdd62 service nova] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Received event network-vif-plugged-51940ecf-0cf7-40a7-ad25-0aab2c24a535 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 857.543883] env[61629]: DEBUG oslo_concurrency.lockutils [req-e222d5aa-8be6-4ff4-8b64-1169c1724937 req-dd6ef434-943f-45e6-a8aa-3071179bdd62 service nova] Acquiring lock "08cb71f4-2ebe-4694-856c-2e772f319cdf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.544163] env[61629]: DEBUG oslo_concurrency.lockutils [req-e222d5aa-8be6-4ff4-8b64-1169c1724937 req-dd6ef434-943f-45e6-a8aa-3071179bdd62 service nova] Lock "08cb71f4-2ebe-4694-856c-2e772f319cdf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.544401] env[61629]: DEBUG oslo_concurrency.lockutils [req-e222d5aa-8be6-4ff4-8b64-1169c1724937 req-dd6ef434-943f-45e6-a8aa-3071179bdd62 service nova] Lock "08cb71f4-2ebe-4694-856c-2e772f319cdf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.544621] env[61629]: DEBUG nova.compute.manager [req-e222d5aa-8be6-4ff4-8b64-1169c1724937 req-dd6ef434-943f-45e6-a8aa-3071179bdd62 service nova] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] No waiting events found dispatching network-vif-plugged-51940ecf-0cf7-40a7-ad25-0aab2c24a535 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 857.544848] env[61629]: WARNING nova.compute.manager [req-e222d5aa-8be6-4ff4-8b64-1169c1724937 req-dd6ef434-943f-45e6-a8aa-3071179bdd62 service nova] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Received unexpected event network-vif-plugged-51940ecf-0cf7-40a7-ad25-0aab2c24a535 for instance with vm_state building and task_state spawning. [ 857.698257] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]522c23f0-fb13-bf55-9b76-12069232513a, 'name': SearchDatastore_Task, 'duration_secs': 0.01102} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.698556] env[61629]: DEBUG oslo_concurrency.lockutils [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.698787] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 2b01eeae-64be-44b3-b4cf-c2a8490043e3/2b01eeae-64be-44b3-b4cf-c2a8490043e3.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 857.699056] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b77e198-c6fa-42f9-9413-58cbc5092e08 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.706143] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Waiting for the task: (returnval){ [ 857.706143] env[61629]: value = "task-1354113" [ 857.706143] env[61629]: _type = "Task" [ 857.706143] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.713934] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354113, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.832639] env[61629]: INFO nova.compute.manager [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Took 28.14 seconds to build instance. [ 857.900499] env[61629]: DEBUG nova.network.neutron [req-576185f4-1ca5-4623-bc10-a5a0d8137b71 req-66e0a441-a96c-4a53-a9eb-d580c543c63c service nova] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Updated VIF entry in instance network info cache for port 26b1c08a-ffa0-488a-ae0b-482ca395c8ad. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 857.900858] env[61629]: DEBUG nova.network.neutron [req-576185f4-1ca5-4623-bc10-a5a0d8137b71 req-66e0a441-a96c-4a53-a9eb-d580c543c63c service nova] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Updating instance_info_cache with network_info: [{"id": "26b1c08a-ffa0-488a-ae0b-482ca395c8ad", "address": "fa:16:3e:17:c6:e3", "network": {"id": "a7162f2a-5965-4793-b504-070d397c4652", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-205722911-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27cc5f4c983a4a40aca3f207a6fed658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26b1c08a-ff", "ovs_interfaceid": "26b1c08a-ffa0-488a-ae0b-482ca395c8ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.970981] env[61629]: DEBUG nova.compute.utils [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 857.974472] env[61629]: DEBUG nova.compute.manager [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 857.974727] env[61629]: DEBUG nova.network.neutron [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 857.995171] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5232d8fb-a66d-c29f-7d3b-0d783c72de09, 'name': SearchDatastore_Task, 'duration_secs': 0.010411} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.995171] env[61629]: DEBUG oslo_concurrency.lockutils [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.995171] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 857.995171] env[61629]: DEBUG oslo_concurrency.lockutils [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.995171] env[61629]: DEBUG oslo_concurrency.lockutils [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.995171] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 857.995646] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a0d5c35-7019-43be-93f5-bd8d8e4d0a7c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.006709] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 858.007455] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 858.007836] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1a853b9-46bd-464f-9dfa-198a87fdcf19 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.013459] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Waiting for the task: (returnval){ [ 858.013459] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5225f360-717c-96b6-b7a1-bdacb9d71514" [ 858.013459] env[61629]: _type = "Task" [ 858.013459] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.022253] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5225f360-717c-96b6-b7a1-bdacb9d71514, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.032117] env[61629]: DEBUG nova.policy [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38cc8b6343d54d30a3f6f13512d23020', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e7fced3a50d4821b42cf087d8111cb7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 858.054258] env[61629]: DEBUG nova.network.neutron [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Successfully updated port: 51940ecf-0cf7-40a7-ad25-0aab2c24a535 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 858.218407] env[61629]: DEBUG nova.compute.manager [req-b6996f42-3cbe-4abe-b67b-2180a5b3b371 req-e9ef9505-1368-4520-9c91-bf02acc5bce3 service nova] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Received event network-changed-51940ecf-0cf7-40a7-ad25-0aab2c24a535 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 858.218624] env[61629]: DEBUG nova.compute.manager [req-b6996f42-3cbe-4abe-b67b-2180a5b3b371 req-e9ef9505-1368-4520-9c91-bf02acc5bce3 service nova] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Refreshing instance network info cache due to event network-changed-51940ecf-0cf7-40a7-ad25-0aab2c24a535. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 858.218869] env[61629]: DEBUG oslo_concurrency.lockutils [req-b6996f42-3cbe-4abe-b67b-2180a5b3b371 req-e9ef9505-1368-4520-9c91-bf02acc5bce3 service nova] Acquiring lock "refresh_cache-08cb71f4-2ebe-4694-856c-2e772f319cdf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.219022] env[61629]: DEBUG oslo_concurrency.lockutils [req-b6996f42-3cbe-4abe-b67b-2180a5b3b371 req-e9ef9505-1368-4520-9c91-bf02acc5bce3 service nova] Acquired lock "refresh_cache-08cb71f4-2ebe-4694-856c-2e772f319cdf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.219183] env[61629]: DEBUG nova.network.neutron [req-b6996f42-3cbe-4abe-b67b-2180a5b3b371 req-e9ef9505-1368-4520-9c91-bf02acc5bce3 service nova] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Refreshing network info cache for port 51940ecf-0cf7-40a7-ad25-0aab2c24a535 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 858.231797] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354113, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.335172] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6f26727e-eb85-4f9a-9c7f-1ab253f6aa4f tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Lock "c3f830d6-8999-49d5-a431-b09dfdaf8313" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.270s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.405063] env[61629]: DEBUG oslo_concurrency.lockutils [req-576185f4-1ca5-4623-bc10-a5a0d8137b71 req-66e0a441-a96c-4a53-a9eb-d580c543c63c service nova] Releasing lock "refresh_cache-68c1e93a-2829-4764-a900-75c3479b4715" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.474966] env[61629]: DEBUG nova.compute.manager [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 858.528703] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5225f360-717c-96b6-b7a1-bdacb9d71514, 'name': SearchDatastore_Task, 'duration_secs': 0.015398} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.529674] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e78f974-84a5-4c00-a7b3-f3bd796edee4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.544044] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Waiting for the task: (returnval){ [ 858.544044] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52431c90-8b22-a050-9669-f2a1e7be08f6" [ 858.544044] env[61629]: _type = "Task" [ 858.544044] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.555646] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Acquiring lock "refresh_cache-08cb71f4-2ebe-4694-856c-2e772f319cdf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.559884] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52431c90-8b22-a050-9669-f2a1e7be08f6, 'name': SearchDatastore_Task, 'duration_secs': 0.009613} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.559884] env[61629]: DEBUG oslo_concurrency.lockutils [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.559996] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 68c1e93a-2829-4764-a900-75c3479b4715/68c1e93a-2829-4764-a900-75c3479b4715.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 858.560278] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3a751c43-7209-427a-93b1-8641d7b6b2d7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.566672] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Waiting for the task: (returnval){ [ 858.566672] env[61629]: value = "task-1354114" [ 858.566672] env[61629]: _type = "Task" [ 858.566672] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.575479] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': task-1354114, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.602196] env[61629]: DEBUG nova.network.neutron [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Successfully created port: 91aa1640-3097-4a26-9090-4081740f917d {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 858.723019] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354113, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.528329} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.723019] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 2b01eeae-64be-44b3-b4cf-c2a8490043e3/2b01eeae-64be-44b3-b4cf-c2a8490043e3.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 858.723019] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 858.723019] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-43aac453-1bc1-43f6-9265-8fa28e614c69 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.732018] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Waiting for the task: (returnval){ [ 858.732018] env[61629]: value = "task-1354115" [ 858.732018] env[61629]: _type = "Task" [ 858.732018] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.749018] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354115, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.770882] env[61629]: DEBUG nova.network.neutron [req-b6996f42-3cbe-4abe-b67b-2180a5b3b371 req-e9ef9505-1368-4520-9c91-bf02acc5bce3 service nova] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 858.796709] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dbd29cc-ed32-4b0b-bb2f-54d3733e9c29 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.805868] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43ca6cc9-db87-4fac-9a01-b692432d5953 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.845178] env[61629]: DEBUG nova.compute.manager [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 858.851169] env[61629]: DEBUG nova.network.neutron [req-b6996f42-3cbe-4abe-b67b-2180a5b3b371 req-e9ef9505-1368-4520-9c91-bf02acc5bce3 service nova] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.853056] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af3c828c-79aa-45e7-93b7-7d69ab4eecd8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.865378] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd988ea4-54d8-4b70-9223-b1d0b5523a8d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.885337] env[61629]: DEBUG nova.compute.provider_tree [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 859.076839] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': task-1354114, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495019} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.077100] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 68c1e93a-2829-4764-a900-75c3479b4715/68c1e93a-2829-4764-a900-75c3479b4715.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 859.077314] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 859.077559] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-55d09b49-6220-4c5c-8328-3cb061aa4d3a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.083151] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Waiting for the task: (returnval){ [ 859.083151] env[61629]: value = "task-1354116" [ 859.083151] env[61629]: _type = "Task" [ 859.083151] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.090421] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': task-1354116, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.242288] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354115, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093409} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.242547] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 859.243365] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ece890c-cde5-418a-b96b-81d77b15fbca {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.265266] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] 2b01eeae-64be-44b3-b4cf-c2a8490043e3/2b01eeae-64be-44b3-b4cf-c2a8490043e3.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 859.265944] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb9cd277-164a-442b-abcb-08a6443a86d9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.285390] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Waiting for the task: (returnval){ [ 859.285390] env[61629]: value = "task-1354117" [ 859.285390] env[61629]: _type = "Task" [ 859.285390] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.292883] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354117, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.357943] env[61629]: DEBUG oslo_concurrency.lockutils [req-b6996f42-3cbe-4abe-b67b-2180a5b3b371 req-e9ef9505-1368-4520-9c91-bf02acc5bce3 service nova] Releasing lock "refresh_cache-08cb71f4-2ebe-4694-856c-2e772f319cdf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.360375] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Acquired lock "refresh_cache-08cb71f4-2ebe-4694-856c-2e772f319cdf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.360539] env[61629]: DEBUG nova.network.neutron [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 859.375708] env[61629]: DEBUG oslo_concurrency.lockutils [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.388428] env[61629]: DEBUG nova.scheduler.client.report [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 859.487347] env[61629]: DEBUG nova.compute.manager [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 859.512172] env[61629]: DEBUG nova.virt.hardware [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 859.512458] env[61629]: DEBUG nova.virt.hardware [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 859.512623] env[61629]: DEBUG nova.virt.hardware [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 859.512824] env[61629]: DEBUG nova.virt.hardware [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 859.513050] env[61629]: DEBUG nova.virt.hardware [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 859.513248] env[61629]: DEBUG nova.virt.hardware [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 859.513527] env[61629]: DEBUG nova.virt.hardware [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 859.513708] env[61629]: DEBUG nova.virt.hardware [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 859.513881] env[61629]: DEBUG 
nova.virt.hardware [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 859.514058] env[61629]: DEBUG nova.virt.hardware [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 859.514253] env[61629]: DEBUG nova.virt.hardware [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 859.515187] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb45966b-50f4-4014-bb81-fc84c7b9bc86 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.523516] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a010264-5577-420c-a688-cbb21221283d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.593235] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': task-1354116, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.30149} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.593354] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 859.594108] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba941613-8425-4e3f-9784-f803793208d7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.616194] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 68c1e93a-2829-4764-a900-75c3479b4715/68c1e93a-2829-4764-a900-75c3479b4715.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 859.616471] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb045ec0-6686-437d-981b-84f93048dc90 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.636277] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Waiting for the task: (returnval){ [ 859.636277] env[61629]: value = "task-1354118" [ 859.636277] env[61629]: _type = "Task" [ 859.636277] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.643479] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': task-1354118, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.796818] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354117, 'name': ReconfigVM_Task, 'duration_secs': 0.272579} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.797086] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Reconfigured VM instance instance-00000043 to attach disk [datastore2] 2b01eeae-64be-44b3-b4cf-c2a8490043e3/2b01eeae-64be-44b3-b4cf-c2a8490043e3.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 859.797826] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3c4090b5-c7eb-470d-a85c-4bb9d01f53b2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.804362] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Waiting for the task: (returnval){ [ 859.804362] env[61629]: value = "task-1354119" [ 859.804362] env[61629]: _type = "Task" [ 859.804362] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.814600] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354119, 'name': Rename_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.895350] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.435s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.896072] env[61629]: DEBUG nova.compute.manager [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 859.898944] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.186s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.900456] env[61629]: INFO nova.compute.claims [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 859.904687] env[61629]: DEBUG nova.network.neutron [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 860.070590] env[61629]: DEBUG nova.compute.manager [req-c9649c03-d092-4b27-ae95-ffcad469e325 req-06ba8f98-cd73-4a1c-a811-51e19aa1dfa7 service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Received event network-vif-plugged-91aa1640-3097-4a26-9090-4081740f917d {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 860.070818] env[61629]: DEBUG oslo_concurrency.lockutils [req-c9649c03-d092-4b27-ae95-ffcad469e325 req-06ba8f98-cd73-4a1c-a811-51e19aa1dfa7 service nova] Acquiring lock "09890839-b1d9-4558-992d-b1a6f4c5f750-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.071060] env[61629]: DEBUG oslo_concurrency.lockutils [req-c9649c03-d092-4b27-ae95-ffcad469e325 req-06ba8f98-cd73-4a1c-a811-51e19aa1dfa7 service nova] Lock "09890839-b1d9-4558-992d-b1a6f4c5f750-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.071265] env[61629]: DEBUG oslo_concurrency.lockutils [req-c9649c03-d092-4b27-ae95-ffcad469e325 req-06ba8f98-cd73-4a1c-a811-51e19aa1dfa7 service nova] Lock "09890839-b1d9-4558-992d-b1a6f4c5f750-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.071389] env[61629]: DEBUG nova.compute.manager [req-c9649c03-d092-4b27-ae95-ffcad469e325 req-06ba8f98-cd73-4a1c-a811-51e19aa1dfa7 service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] No waiting events found dispatching network-vif-plugged-91aa1640-3097-4a26-9090-4081740f917d {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 860.071518] env[61629]: WARNING nova.compute.manager [req-c9649c03-d092-4b27-ae95-ffcad469e325 req-06ba8f98-cd73-4a1c-a811-51e19aa1dfa7 service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Received unexpected event network-vif-plugged-91aa1640-3097-4a26-9090-4081740f917d for instance with vm_state building and task_state spawning. 
[ 860.106030] env[61629]: DEBUG nova.network.neutron [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Updating instance_info_cache with network_info: [{"id": "51940ecf-0cf7-40a7-ad25-0aab2c24a535", "address": "fa:16:3e:fa:0c:6d", "network": {"id": "80098c9c-3683-4298-9ac9-4cf114589ae1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.227", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9a36b70b3bef49e68cbe43ec3eaa5dc8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51940ecf-0c", "ovs_interfaceid": "51940ecf-0cf7-40a7-ad25-0aab2c24a535", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.147224] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': task-1354118, 'name': ReconfigVM_Task, 'duration_secs': 0.28537} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.147750] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 68c1e93a-2829-4764-a900-75c3479b4715/68c1e93a-2829-4764-a900-75c3479b4715.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 860.148598] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5efe72b1-1210-4e97-b7c7-3690e3c3a4a6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.156223] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Waiting for the task: (returnval){ [ 860.156223] env[61629]: value = "task-1354120" [ 860.156223] env[61629]: _type = "Task" [ 860.156223] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.164493] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': task-1354120, 'name': Rename_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.176149] env[61629]: DEBUG nova.network.neutron [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Successfully updated port: 91aa1640-3097-4a26-9090-4081740f917d {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 860.314422] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354119, 'name': Rename_Task, 'duration_secs': 0.156601} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.315649] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 860.315649] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-96574681-ede7-4191-880a-10534ff53b4a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.321757] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Waiting for the task: (returnval){ [ 860.321757] env[61629]: value = "task-1354121" [ 860.321757] env[61629]: _type = "Task" [ 860.321757] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.330592] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354121, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.405200] env[61629]: DEBUG nova.compute.utils [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 860.406736] env[61629]: DEBUG nova.compute.manager [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 860.406908] env[61629]: DEBUG nova.network.neutron [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 860.455778] env[61629]: DEBUG nova.policy [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af30b52d3d0c472a85a0c39058674445', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a6165d04bf0a468faaab339addeaa59e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 860.609391] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Releasing lock "refresh_cache-08cb71f4-2ebe-4694-856c-2e772f319cdf" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.609741] env[61629]: DEBUG nova.compute.manager [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Instance network_info: |[{"id": "51940ecf-0cf7-40a7-ad25-0aab2c24a535", "address": "fa:16:3e:fa:0c:6d", "network": {"id": "80098c9c-3683-4298-9ac9-4cf114589ae1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.227", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9a36b70b3bef49e68cbe43ec3eaa5dc8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51940ecf-0c", "ovs_interfaceid": "51940ecf-0cf7-40a7-ad25-0aab2c24a535", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 860.610223] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:0c:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '193994c7-8e1b-4f25-a4a4-d0563845eb28', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '51940ecf-0cf7-40a7-ad25-0aab2c24a535', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 860.619151] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Creating folder: Project (67e38fd8e30349c6857025719fd26211). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 860.619445] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fc9cf56a-c404-4f8d-93b3-2d0679726637 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.631817] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Created folder: Project (67e38fd8e30349c6857025719fd26211) in parent group-v288443. [ 860.632046] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Creating folder: Instances. Parent ref: group-v288490. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 860.632313] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8971643-5071-460d-848c-c7abaad9e0e0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.643020] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Created folder: Instances in parent group-v288490. [ 860.643316] env[61629]: DEBUG oslo.service.loopingcall [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 860.643688] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 860.643725] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-97a7641f-6b88-46ff-a13c-c75b1ec9ab4a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.678572] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': task-1354120, 'name': Rename_Task, 'duration_secs': 0.140824} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.680087] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 860.680220] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquired lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.680444] env[61629]: DEBUG nova.network.neutron [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 860.681713] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 860.682543] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 860.682543] env[61629]: value = "task-1354124" [ 860.682543] env[61629]: _type = "Task" [ 860.682543] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.682543] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8038d443-1a85-42ef-ae57-a8d543458da1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.693902] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354124, 'name': CreateVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.694821] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Waiting for the task: (returnval){ [ 860.694821] env[61629]: value = "task-1354125" [ 860.694821] env[61629]: _type = "Task" [ 860.694821] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.705544] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': task-1354125, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.748157] env[61629]: DEBUG nova.network.neutron [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Successfully created port: 85b39faa-8b58-4b86-b4df-a4b98f2a5325 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 860.832691] env[61629]: DEBUG oslo_vmware.api [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354121, 'name': PowerOnVM_Task, 'duration_secs': 0.489248} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.832985] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 860.833200] env[61629]: INFO nova.compute.manager [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Took 8.51 seconds to spawn the instance on the hypervisor. [ 860.833373] env[61629]: DEBUG nova.compute.manager [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 860.834300] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754bc422-e83e-4cb8-986f-129bf8656105 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.912236] env[61629]: DEBUG nova.compute.manager [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 861.173429] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Acquiring lock "c3f830d6-8999-49d5-a431-b09dfdaf8313" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.173717] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Lock "c3f830d6-8999-49d5-a431-b09dfdaf8313" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.174334] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Acquiring lock "c3f830d6-8999-49d5-a431-b09dfdaf8313-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.174334] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Lock "c3f830d6-8999-49d5-a431-b09dfdaf8313-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.174334] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Lock "c3f830d6-8999-49d5-a431-b09dfdaf8313-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.178711] env[61629]: INFO nova.compute.manager [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Terminating instance [ 861.181057] env[61629]: DEBUG nova.compute.manager [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 861.181057] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 861.181553] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908ca27c-da4c-4361-95d4-6684a68a8eaa {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.192684] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc112a0-3a43-489c-a68b-947ef353fa86 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.202097] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354124, 'name': CreateVM_Task, 'duration_secs': 0.350789} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.202382] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 861.206547] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 861.206859] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7518953a-7a8e-4865-89d0-831243ca9acb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.210239] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.210404] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.210748] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 861.211668] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7415e1c2-241e-4e2f-ad2a-0ffaa69fae36 {{(pid=61629) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.218832] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': task-1354125, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.221553] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee1f15d1-2026-4cce-ab66-0df2fe93f1b1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.225109] env[61629]: DEBUG oslo_vmware.api [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Waiting for the task: (returnval){ [ 861.225109] env[61629]: value = "task-1354126" [ 861.225109] env[61629]: _type = "Task" [ 861.225109] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.255928] env[61629]: DEBUG nova.network.neutron [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 861.260096] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e1e536-541a-41bf-b0d1-2759e0ea8737 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.263731] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Waiting for the task: (returnval){ [ 861.263731] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52db3c95-32e2-2396-8dde-91fb049c0d47" [ 861.263731] env[61629]: _type = "Task" [ 861.263731] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.266381] env[61629]: DEBUG oslo_vmware.api [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Task: {'id': task-1354126, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.273091] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f611d1-0022-4b95-8a4a-773c8a23c377 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.280576] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52db3c95-32e2-2396-8dde-91fb049c0d47, 'name': SearchDatastore_Task, 'duration_secs': 0.012515} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.283192] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.283493] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 861.283760] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.283910] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.284102] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 861.284407] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3f250477-3b92-47d5-a45a-1f72dd90e2a1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.296375] env[61629]: DEBUG nova.compute.provider_tree [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 861.306635] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 861.306820] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 861.307589] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c34991b3-d1f9-4d5c-9d4d-2f0916b23150 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.314048] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Waiting for the task: (returnval){ [ 861.314048] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52bf809f-d194-d5c2-680c-01304f2845f8" [ 861.314048] env[61629]: _type = "Task" [ 861.314048] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.322770] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52bf809f-d194-d5c2-680c-01304f2845f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.354559] env[61629]: INFO nova.compute.manager [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Took 29.68 seconds to build instance. [ 861.426319] env[61629]: DEBUG nova.network.neutron [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Updating instance_info_cache with network_info: [{"id": "91aa1640-3097-4a26-9090-4081740f917d", "address": "fa:16:3e:d4:a8:15", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91aa1640-30", "ovs_interfaceid": "91aa1640-3097-4a26-9090-4081740f917d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.711909] env[61629]: DEBUG oslo_vmware.api [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': task-1354125, 'name': PowerOnVM_Task, 'duration_secs': 0.614651} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.712202] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 861.712404] env[61629]: INFO nova.compute.manager [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Took 6.96 seconds to spawn the instance on the hypervisor. [ 861.712581] env[61629]: DEBUG nova.compute.manager [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 861.713349] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b12ce49-9c3a-48a4-990d-b846311ea4ee {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.736194] env[61629]: DEBUG oslo_vmware.api [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Task: {'id': task-1354126, 'name': PowerOffVM_Task, 'duration_secs': 0.356901} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.736194] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 861.736194] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 861.736194] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac51807e-728d-42ee-b5be-abc4a471dd0d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.797797] env[61629]: DEBUG nova.scheduler.client.report [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 861.824622] env[61629]: 
DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52bf809f-d194-d5c2-680c-01304f2845f8, 'name': SearchDatastore_Task, 'duration_secs': 0.010312} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.825401] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7e134f4-4a43-4224-9f94-e3e9134b2cff {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.830371] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Waiting for the task: (returnval){ [ 861.830371] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52839d24-690b-ed26-0da3-fa83b1c79639" [ 861.830371] env[61629]: _type = "Task" [ 861.830371] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.837645] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52839d24-690b-ed26-0da3-fa83b1c79639, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.855649] env[61629]: DEBUG oslo_concurrency.lockutils [None req-deea5e95-5abf-42c3-bd0f-f248e9633948 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.570s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.907139] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 861.907361] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 861.907543] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Deleting the datastore file [datastore2] c3f830d6-8999-49d5-a431-b09dfdaf8313 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 861.907843] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad80073e-9990-46a5-8fe2-9beea19a36fa {{(pid=61629) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.914559] env[61629]: DEBUG oslo_vmware.api [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Waiting for the task: (returnval){ [ 861.914559] env[61629]: value = "task-1354128" [ 861.914559] env[61629]: _type = "Task" [ 861.914559] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.922428] env[61629]: DEBUG oslo_vmware.api [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Task: {'id': task-1354128, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.928658] env[61629]: DEBUG nova.compute.manager [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 861.930838] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Releasing lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.931178] env[61629]: DEBUG nova.compute.manager [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Instance network_info: |[{"id": "91aa1640-3097-4a26-9090-4081740f917d", "address": "fa:16:3e:d4:a8:15", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91aa1640-30", "ovs_interfaceid": "91aa1640-3097-4a26-9090-4081740f917d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 861.931780] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:d4:a8:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2c2daf7c-c01b-41b1-a09a-fb8b893b4c80', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91aa1640-3097-4a26-9090-4081740f917d', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 861.939745] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Creating folder: Project (9e7fced3a50d4821b42cf087d8111cb7). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 861.940430] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3a1a49f8-82e0-4e67-b023-02b1e7d681f3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.951147] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Created folder: Project (9e7fced3a50d4821b42cf087d8111cb7) in parent group-v288443. [ 861.951426] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Creating folder: Instances. Parent ref: group-v288493. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 861.951569] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-154a0c16-f484-4449-9572-5220e3856dcc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.955840] env[61629]: DEBUG nova.virt.hardware [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 861.956065] env[61629]: DEBUG nova.virt.hardware [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 861.956225] env[61629]: DEBUG nova.virt.hardware [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 861.956402] env[61629]: DEBUG nova.virt.hardware [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 861.956555] env[61629]: DEBUG nova.virt.hardware [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 861.956694] env[61629]: DEBUG nova.virt.hardware [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 861.956896] env[61629]: DEBUG nova.virt.hardware [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 861.957070] env[61629]: DEBUG nova.virt.hardware [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 861.957239] env[61629]: DEBUG nova.virt.hardware [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 861.957398] env[61629]: DEBUG nova.virt.hardware [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 861.957566] env[61629]: DEBUG nova.virt.hardware [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 861.958356] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee358697-fbcf-4a10-bab7-c00b4348742b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.961624] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Created folder: Instances in parent group-v288493. 
[ 861.961843] env[61629]: DEBUG oslo.service.loopingcall [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 861.962356] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 861.962552] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6adac1d6-cb99-4f6b-803a-6462d158ba69 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.979454] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec6a66f-9af9-4df4-a5be-c5c1cfe31313 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.983857] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 861.983857] env[61629]: value = "task-1354131" [ 861.983857] env[61629]: _type = "Task" [ 861.983857] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.999234] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354131, 'name': CreateVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.142849] env[61629]: DEBUG nova.compute.manager [req-c80e0138-fdb4-4615-9e3b-7920473628a7 req-c8dd8794-b11e-4ca8-a0cd-7baf8892ab6a service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Received event network-changed-91aa1640-3097-4a26-9090-4081740f917d {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 862.142849] env[61629]: DEBUG nova.compute.manager [req-c80e0138-fdb4-4615-9e3b-7920473628a7 req-c8dd8794-b11e-4ca8-a0cd-7baf8892ab6a service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Refreshing instance network info cache due to event network-changed-91aa1640-3097-4a26-9090-4081740f917d. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 862.142849] env[61629]: DEBUG oslo_concurrency.lockutils [req-c80e0138-fdb4-4615-9e3b-7920473628a7 req-c8dd8794-b11e-4ca8-a0cd-7baf8892ab6a service nova] Acquiring lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.142849] env[61629]: DEBUG oslo_concurrency.lockutils [req-c80e0138-fdb4-4615-9e3b-7920473628a7 req-c8dd8794-b11e-4ca8-a0cd-7baf8892ab6a service nova] Acquired lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.145209] env[61629]: DEBUG nova.network.neutron [req-c80e0138-fdb4-4615-9e3b-7920473628a7 req-c8dd8794-b11e-4ca8-a0cd-7baf8892ab6a service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Refreshing network info cache for port 91aa1640-3097-4a26-9090-4081740f917d {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 862.233951] env[61629]: INFO nova.compute.manager [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Took 26.37 seconds to build instance. [ 862.305024] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.403s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.305024] env[61629]: DEBUG nova.compute.manager [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 862.305551] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.907s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.307118] env[61629]: INFO nova.compute.claims [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 862.344116] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52839d24-690b-ed26-0da3-fa83b1c79639, 'name': SearchDatastore_Task, 'duration_secs': 0.009275} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.344116] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.344116] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 08cb71f4-2ebe-4694-856c-2e772f319cdf/08cb71f4-2ebe-4694-856c-2e772f319cdf.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 862.344116] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6e0a9f13-ad87-4f36-99c7-bd45df9fb53a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.353018] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Waiting for the task: (returnval){ [ 862.353018] env[61629]: value = "task-1354132" [ 862.353018] env[61629]: _type = "Task" [ 862.353018] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.361029] env[61629]: DEBUG nova.compute.manager [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 862.362900] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Task: {'id': task-1354132, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.424934] env[61629]: DEBUG oslo_vmware.api [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Task: {'id': task-1354128, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.19451} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.425415] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 862.425876] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 862.426225] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 862.428019] env[61629]: INFO nova.compute.manager [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Took 1.25 seconds to destroy the instance on the hypervisor. [ 862.428019] env[61629]: DEBUG oslo.service.loopingcall [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 862.428019] env[61629]: DEBUG nova.compute.manager [-] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 862.428019] env[61629]: DEBUG nova.network.neutron [-] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 862.481388] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 862.481388] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 862.493880] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354131, 'name': CreateVM_Task, 'duration_secs': 0.312739} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.494216] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 862.494975] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.498019] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.498019] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 862.498019] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3228bfbe-456c-486c-b448-c1005e620580 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.502288] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 862.502288] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52060f9e-3137-1330-fc8c-4d2dd34a2c9e" [ 862.502288] env[61629]: _type = "Task" [ 862.502288] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.512545] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52060f9e-3137-1330-fc8c-4d2dd34a2c9e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.737084] env[61629]: DEBUG oslo_concurrency.lockutils [None req-218f9561-760c-45dd-9a19-b7c4a7662efa tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Lock "68c1e93a-2829-4764-a900-75c3479b4715" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.189s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.776127] env[61629]: DEBUG nova.network.neutron [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Successfully updated port: 85b39faa-8b58-4b86-b4df-a4b98f2a5325 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 862.812071] env[61629]: DEBUG nova.compute.utils [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 862.819421] env[61629]: DEBUG nova.compute.manager [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 862.819885] env[61629]: DEBUG nova.network.neutron [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 862.861771] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Task: {'id': task-1354132, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.885406] env[61629]: DEBUG oslo_concurrency.lockutils [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.903256] env[61629]: DEBUG nova.policy [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af30b52d3d0c472a85a0c39058674445', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a6165d04bf0a468faaab339addeaa59e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 862.987640] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 862.987640] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Starting heal instance info cache {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 862.987640] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Rebuilding the list of instances to heal {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 863.019774] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52060f9e-3137-1330-fc8c-4d2dd34a2c9e, 'name': SearchDatastore_Task, 'duration_secs': 0.011017} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.020120] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.020395] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 863.022017] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.022017] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.022017] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 863.022017] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c79c5aec-6899-4356-ad50-c2b485a13c27 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.030329] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 863.030505] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 863.031281] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e01f931-f334-4325-b357-711d5b614b79 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.037990] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 863.037990] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]521c2457-8ce6-4e4d-f979-8e4679cefabf" [ 863.037990] env[61629]: _type = "Task" [ 863.037990] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.045361] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]521c2457-8ce6-4e4d-f979-8e4679cefabf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.131129] env[61629]: DEBUG nova.network.neutron [req-c80e0138-fdb4-4615-9e3b-7920473628a7 req-c8dd8794-b11e-4ca8-a0cd-7baf8892ab6a service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Updated VIF entry in instance network info cache for port 91aa1640-3097-4a26-9090-4081740f917d. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 863.131479] env[61629]: DEBUG nova.network.neutron [req-c80e0138-fdb4-4615-9e3b-7920473628a7 req-c8dd8794-b11e-4ca8-a0cd-7baf8892ab6a service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Updating instance_info_cache with network_info: [{"id": "91aa1640-3097-4a26-9090-4081740f917d", "address": "fa:16:3e:d4:a8:15", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91aa1640-30", "ovs_interfaceid": "91aa1640-3097-4a26-9090-4081740f917d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.158973] env[61629]: DEBUG nova.compute.manager [req-b68c3323-6a9b-40de-8a0d-60f2610a11fc req-6d657bc2-66e8-4cb6-a389-5898c2837326 service nova] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Received event network-vif-deleted-be65c78b-11aa-43c2-aad9-f7f19a2b47c1 {{(pid=61629) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 863.159188] env[61629]: INFO nova.compute.manager [req-b68c3323-6a9b-40de-8a0d-60f2610a11fc req-6d657bc2-66e8-4cb6-a389-5898c2837326 service nova] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Neutron deleted interface be65c78b-11aa-43c2-aad9-f7f19a2b47c1; detaching it from the instance and deleting it from the info cache [ 863.159346] env[61629]: DEBUG nova.network.neutron [req-b68c3323-6a9b-40de-8a0d-60f2610a11fc req-6d657bc2-66e8-4cb6-a389-5898c2837326 service nova] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.238418] env[61629]: DEBUG nova.compute.manager [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 863.281347] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "refresh_cache-9c340ca1-75e0-4d65-8aae-0d5e11ff3e66" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.281347] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquired lock "refresh_cache-9c340ca1-75e0-4d65-8aae-0d5e11ff3e66" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.281347] env[61629]: DEBUG nova.network.neutron [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 863.285379] env[61629]: DEBUG nova.network.neutron [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Successfully created port: b0ccf912-7d97-4281-943f-c7ccdf8eec23 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 863.319965] env[61629]: DEBUG nova.compute.manager [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 863.366703] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Task: {'id': task-1354132, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.518682} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.366978] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 08cb71f4-2ebe-4694-856c-2e772f319cdf/08cb71f4-2ebe-4694-856c-2e772f319cdf.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 863.367206] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 863.367465] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-536e3e52-3f84-44f3-9f97-5c89f4873407 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.376563] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Waiting for the task: (returnval){ [ 863.376563] env[61629]: value = "task-1354133" [ 863.376563] env[61629]: _type = "Task" [ 863.376563] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.387651] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Task: {'id': task-1354133, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.492835] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Skipping network cache update for instance because it is being deleted. {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 863.493078] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Skipping network cache update for instance because it is Building. {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 863.493225] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Skipping network cache update for instance because it is Building. {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 863.493623] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Skipping network cache update for instance because it is Building. 
{{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 863.493623] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Skipping network cache update for instance because it is Building. {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 863.493623] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Skipping network cache update for instance because it is Building. {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 863.501457] env[61629]: DEBUG nova.network.neutron [-] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.512873] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Acquiring lock "refresh_cache-dce0c7e1-1e47-49ad-88f7-f8f5e293d239" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.513033] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Acquired lock "refresh_cache-dce0c7e1-1e47-49ad-88f7-f8f5e293d239" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.513182] env[61629]: DEBUG nova.network.neutron [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Forcefully refreshing network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 863.513336] env[61629]: DEBUG nova.objects.instance [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lazy-loading 'info_cache' on Instance uuid dce0c7e1-1e47-49ad-88f7-f8f5e293d239 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 863.551503] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]521c2457-8ce6-4e4d-f979-8e4679cefabf, 'name': SearchDatastore_Task, 'duration_secs': 0.009625} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.555757] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7702f39-1d29-4e15-b33d-e331b36a6156 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.561979] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 863.561979] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]527fd2a7-bcac-df77-6906-3b9acd0f0fec" [ 863.561979] env[61629]: _type = "Task" [ 863.561979] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.570384] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]527fd2a7-bcac-df77-6906-3b9acd0f0fec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.633892] env[61629]: DEBUG oslo_concurrency.lockutils [req-c80e0138-fdb4-4615-9e3b-7920473628a7 req-c8dd8794-b11e-4ca8-a0cd-7baf8892ab6a service nova] Releasing lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.661815] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a78026a5-e901-4423-8f09-602bc6084d06 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.670695] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3ab550-2345-4ecf-801a-6801f1cf4afb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.684273] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501591cf-7e17-46ca-aee3-3023c06a6622 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.694047] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c5fc9a5-b10a-4250-9f8e-aa9fd83be713 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.702782] env[61629]: DEBUG nova.compute.manager [req-b68c3323-6a9b-40de-8a0d-60f2610a11fc req-6d657bc2-66e8-4cb6-a389-5898c2837326 service nova] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Detach interface failed, port_id=be65c78b-11aa-43c2-aad9-f7f19a2b47c1, reason: Instance c3f830d6-8999-49d5-a431-b09dfdaf8313 could not be found. 
{{(pid=61629) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 863.728695] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6e36df-f7af-483f-adae-9e1a1265612f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.735838] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4fcc429-21bb-45d5-ab79-75b01db541fc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.750867] env[61629]: DEBUG nova.compute.provider_tree [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 863.763800] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.813214] env[61629]: DEBUG nova.network.neutron [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 863.838826] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a18c6d0c-c88f-4c83-ba92-12b7042dac17 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Acquiring lock "68c1e93a-2829-4764-a900-75c3479b4715" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.839018] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a18c6d0c-c88f-4c83-ba92-12b7042dac17 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Lock "68c1e93a-2829-4764-a900-75c3479b4715" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.839236] env[61629]: INFO nova.compute.manager [None req-a18c6d0c-c88f-4c83-ba92-12b7042dac17 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Rebooting instance [ 863.888835] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Task: {'id': task-1354133, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.248485} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.889112] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 863.889899] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4576481-c97e-4a6e-b559-e78e81e5f511 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.913658] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 08cb71f4-2ebe-4694-856c-2e772f319cdf/08cb71f4-2ebe-4694-856c-2e772f319cdf.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 863.914311] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-639c3180-9dda-48f0-89c9-167f546b7859 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.936270] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Waiting for the task: (returnval){ [ 863.936270] env[61629]: value = "task-1354134" [ 863.936270] env[61629]: _type = "Task" [ 863.936270] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.944822] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Task: {'id': task-1354134, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.979832] env[61629]: DEBUG nova.network.neutron [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Updating instance_info_cache with network_info: [{"id": "85b39faa-8b58-4b86-b4df-a4b98f2a5325", "address": "fa:16:3e:f8:40:65", "network": {"id": "4e6a4470-c260-4226-9409-37a70cc1e8c1", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-817073300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6165d04bf0a468faaab339addeaa59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85b39faa-8b", "ovs_interfaceid": "85b39faa-8b58-4b86-b4df-a4b98f2a5325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.009871] env[61629]: INFO nova.compute.manager [-] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Took 1.58 seconds to deallocate network for instance. [ 864.073608] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]527fd2a7-bcac-df77-6906-3b9acd0f0fec, 'name': SearchDatastore_Task, 'duration_secs': 0.026109} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.073916] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.074198] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 09890839-b1d9-4558-992d-b1a6f4c5f750/09890839-b1d9-4558-992d-b1a6f4c5f750.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 864.074715] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f434d964-0fda-4403-a5a6-1840eca3354c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.082106] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 864.082106] env[61629]: value = "task-1354135" [ 864.082106] env[61629]: _type = "Task" [ 864.082106] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.091138] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354135, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.173052] env[61629]: DEBUG nova.compute.manager [req-7fed2275-a844-4c33-9082-74ff06f201bf req-dc7ae778-9ddc-4f4d-bc8c-3bca2a63d226 service nova] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Received event network-vif-plugged-85b39faa-8b58-4b86-b4df-a4b98f2a5325 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 864.173052] env[61629]: DEBUG oslo_concurrency.lockutils [req-7fed2275-a844-4c33-9082-74ff06f201bf req-dc7ae778-9ddc-4f4d-bc8c-3bca2a63d226 service nova] Acquiring lock "9c340ca1-75e0-4d65-8aae-0d5e11ff3e66-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.173236] env[61629]: DEBUG oslo_concurrency.lockutils [req-7fed2275-a844-4c33-9082-74ff06f201bf req-dc7ae778-9ddc-4f4d-bc8c-3bca2a63d226 service nova] Lock "9c340ca1-75e0-4d65-8aae-0d5e11ff3e66-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.173487] env[61629]: DEBUG oslo_concurrency.lockutils [req-7fed2275-a844-4c33-9082-74ff06f201bf req-dc7ae778-9ddc-4f4d-bc8c-3bca2a63d226 service nova] Lock "9c340ca1-75e0-4d65-8aae-0d5e11ff3e66-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.173808] env[61629]: DEBUG nova.compute.manager [req-7fed2275-a844-4c33-9082-74ff06f201bf req-dc7ae778-9ddc-4f4d-bc8c-3bca2a63d226 service nova] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] No waiting events found dispatching network-vif-plugged-85b39faa-8b58-4b86-b4df-a4b98f2a5325 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 864.174044] env[61629]: WARNING nova.compute.manager [req-7fed2275-a844-4c33-9082-74ff06f201bf req-dc7ae778-9ddc-4f4d-bc8c-3bca2a63d226 service nova] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Received unexpected event network-vif-plugged-85b39faa-8b58-4b86-b4df-a4b98f2a5325 for instance with vm_state building and task_state spawning. [ 864.174281] env[61629]: DEBUG nova.compute.manager [req-7fed2275-a844-4c33-9082-74ff06f201bf req-dc7ae778-9ddc-4f4d-bc8c-3bca2a63d226 service nova] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Received event network-changed-85b39faa-8b58-4b86-b4df-a4b98f2a5325 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 864.174452] env[61629]: DEBUG nova.compute.manager [req-7fed2275-a844-4c33-9082-74ff06f201bf req-dc7ae778-9ddc-4f4d-bc8c-3bca2a63d226 service nova] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Refreshing instance network info cache due to event network-changed-85b39faa-8b58-4b86-b4df-a4b98f2a5325. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 864.174620] env[61629]: DEBUG oslo_concurrency.lockutils [req-7fed2275-a844-4c33-9082-74ff06f201bf req-dc7ae778-9ddc-4f4d-bc8c-3bca2a63d226 service nova] Acquiring lock "refresh_cache-9c340ca1-75e0-4d65-8aae-0d5e11ff3e66" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.255674] env[61629]: DEBUG nova.scheduler.client.report [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 864.343273] env[61629]: DEBUG nova.compute.manager [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 864.377213] env[61629]: DEBUG nova.virt.hardware [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 864.377560] env[61629]: DEBUG nova.virt.hardware [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 864.377639] env[61629]: DEBUG nova.virt.hardware [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 864.377818] env[61629]: DEBUG nova.virt.hardware [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 864.377994] env[61629]: DEBUG nova.virt.hardware [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 864.378886] env[61629]: DEBUG nova.virt.hardware [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 864.379136] env[61629]: DEBUG nova.virt.hardware [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 864.379305] env[61629]: DEBUG nova.virt.hardware [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 864.379479] env[61629]: DEBUG nova.virt.hardware [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 864.379670] env[61629]: DEBUG nova.virt.hardware [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 864.380157] env[61629]: DEBUG nova.virt.hardware [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 864.380767] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-755376e2-a2fa-4f33-813b-39199271eabd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.384704] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a18c6d0c-c88f-4c83-ba92-12b7042dac17 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Acquiring lock "refresh_cache-68c1e93a-2829-4764-a900-75c3479b4715" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.384862] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a18c6d0c-c88f-4c83-ba92-12b7042dac17 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Acquired lock "refresh_cache-68c1e93a-2829-4764-a900-75c3479b4715" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.385063] 
env[61629]: DEBUG nova.network.neutron [None req-a18c6d0c-c88f-4c83-ba92-12b7042dac17 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 864.392834] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbcd486a-7dd9-4e81-9fbf-dbc88c04de39 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.448815] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Task: {'id': task-1354134, 'name': ReconfigVM_Task, 'duration_secs': 0.373868} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.449151] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 08cb71f4-2ebe-4694-856c-2e772f319cdf/08cb71f4-2ebe-4694-856c-2e772f319cdf.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 864.449835] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e0230ec-1e00-4b1f-94ba-ccb45e1ad184 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.460024] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Waiting for the task: (returnval){ [ 864.460024] env[61629]: value = "task-1354136" [ 864.460024] env[61629]: _type = "Task" [ 864.460024] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.470050] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Task: {'id': task-1354136, 'name': Rename_Task} progress is 5%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.484580] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Releasing lock "refresh_cache-9c340ca1-75e0-4d65-8aae-0d5e11ff3e66" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.484580] env[61629]: DEBUG nova.compute.manager [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Instance network_info: |[{"id": "85b39faa-8b58-4b86-b4df-a4b98f2a5325", "address": "fa:16:3e:f8:40:65", "network": {"id": "4e6a4470-c260-4226-9409-37a70cc1e8c1", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-817073300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6165d04bf0a468faaab339addeaa59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85b39faa-8b", "ovs_interfaceid": "85b39faa-8b58-4b86-b4df-a4b98f2a5325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 864.484580] env[61629]: DEBUG oslo_concurrency.lockutils [req-7fed2275-a844-4c33-9082-74ff06f201bf req-dc7ae778-9ddc-4f4d-bc8c-3bca2a63d226 service nova] Acquired lock "refresh_cache-9c340ca1-75e0-4d65-8aae-0d5e11ff3e66" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.484580] env[61629]: DEBUG nova.network.neutron [req-7fed2275-a844-4c33-9082-74ff06f201bf req-dc7ae778-9ddc-4f4d-bc8c-3bca2a63d226 service nova] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Refreshing network info cache for port 85b39faa-8b58-4b86-b4df-a4b98f2a5325 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 864.486019] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:40:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc0e97b-b21d-4557-a4d4-fd7e8f973368', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '85b39faa-8b58-4b86-b4df-a4b98f2a5325', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 864.494225] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 
tempest-ListServerFiltersTestJSON-321141740-project-member] Creating folder: Project (a6165d04bf0a468faaab339addeaa59e). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 864.494833] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5af22ab9-bd3a-4750-87ca-956b1e83b6a3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.507149] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Created folder: Project (a6165d04bf0a468faaab339addeaa59e) in parent group-v288443. [ 864.509815] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Creating folder: Instances. Parent ref: group-v288496. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 864.509815] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad130db4-151d-499e-b97a-b21936c77891 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.516410] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.518620] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Created folder: Instances in parent group-v288496. [ 864.518771] env[61629]: DEBUG oslo.service.loopingcall [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 864.518981] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 864.521470] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-38f0b0a3-43e4-49ee-b35d-c1711c0f1c62 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.545327] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 864.545327] env[61629]: value = "task-1354139" [ 864.545327] env[61629]: _type = "Task" [ 864.545327] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.556940] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354139, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.558043] env[61629]: DEBUG nova.network.neutron [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 864.596143] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354135, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.764781] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.457s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.764781] env[61629]: DEBUG nova.compute.manager [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 864.767522] env[61629]: DEBUG oslo_concurrency.lockutils [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.459s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.770300] env[61629]: INFO nova.compute.claims [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 864.975523] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Task: {'id': task-1354136, 'name': Rename_Task, 'duration_secs': 0.254358} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.976448] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 864.976448] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2bcb7d2-d5ed-45a6-a222-68d7d61a10fa {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.986341] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Waiting for the task: (returnval){ [ 864.986341] env[61629]: value = "task-1354140" [ 864.986341] env[61629]: _type = "Task" [ 864.986341] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.001781] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Task: {'id': task-1354140, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.055253] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354139, 'name': CreateVM_Task} progress is 99%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.091325] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354135, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.611742} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.091852] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 09890839-b1d9-4558-992d-b1a6f4c5f750/09890839-b1d9-4558-992d-b1a6f4c5f750.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 865.094507] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 865.094507] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5a0bdf3f-1a93-46cb-9619-a8da21d8237b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.100025] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 865.100025] env[61629]: value = "task-1354141" [ 865.100025] env[61629]: _type = "Task" [ 865.100025] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.107127] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354141, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.164943] env[61629]: DEBUG nova.network.neutron [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Successfully updated port: b0ccf912-7d97-4281-943f-c7ccdf8eec23 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 865.217763] env[61629]: DEBUG nova.compute.manager [req-f6db6542-0dc9-45ff-936d-db3fbe8eb650 req-73658c86-c200-4b59-9c37-7256a3444138 service nova] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Received event network-changed-26b1c08a-ffa0-488a-ae0b-482ca395c8ad {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 865.217763] env[61629]: DEBUG nova.compute.manager [req-f6db6542-0dc9-45ff-936d-db3fbe8eb650 req-73658c86-c200-4b59-9c37-7256a3444138 service nova] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Refreshing instance network info cache due to event network-changed-26b1c08a-ffa0-488a-ae0b-482ca395c8ad. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 865.217763] env[61629]: DEBUG oslo_concurrency.lockutils [req-f6db6542-0dc9-45ff-936d-db3fbe8eb650 req-73658c86-c200-4b59-9c37-7256a3444138 service nova] Acquiring lock "refresh_cache-68c1e93a-2829-4764-a900-75c3479b4715" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.281020] env[61629]: DEBUG nova.compute.utils [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 865.283408] env[61629]: DEBUG nova.compute.manager [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 865.285033] env[61629]: DEBUG nova.network.neutron [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 865.309326] env[61629]: DEBUG nova.network.neutron [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.320154] env[61629]: DEBUG nova.network.neutron [None req-a18c6d0c-c88f-4c83-ba92-12b7042dac17 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Updating instance_info_cache with network_info: [{"id": "26b1c08a-ffa0-488a-ae0b-482ca395c8ad", "address": "fa:16:3e:17:c6:e3", "network": {"id": "a7162f2a-5965-4793-b504-070d397c4652", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-205722911-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27cc5f4c983a4a40aca3f207a6fed658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26b1c08a-ff", "ovs_interfaceid": "26b1c08a-ffa0-488a-ae0b-482ca395c8ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.325204] env[61629]: DEBUG nova.policy [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Policy check 
for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af30b52d3d0c472a85a0c39058674445', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a6165d04bf0a468faaab339addeaa59e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 865.397756] env[61629]: DEBUG nova.network.neutron [req-7fed2275-a844-4c33-9082-74ff06f201bf req-dc7ae778-9ddc-4f4d-bc8c-3bca2a63d226 service nova] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Updated VIF entry in instance network info cache for port 85b39faa-8b58-4b86-b4df-a4b98f2a5325. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 865.398249] env[61629]: DEBUG nova.network.neutron [req-7fed2275-a844-4c33-9082-74ff06f201bf req-dc7ae778-9ddc-4f4d-bc8c-3bca2a63d226 service nova] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Updating instance_info_cache with network_info: [{"id": "85b39faa-8b58-4b86-b4df-a4b98f2a5325", "address": "fa:16:3e:f8:40:65", "network": {"id": "4e6a4470-c260-4226-9409-37a70cc1e8c1", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-817073300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6165d04bf0a468faaab339addeaa59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85b39faa-8b", "ovs_interfaceid": "85b39faa-8b58-4b86-b4df-a4b98f2a5325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.495118] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Task: {'id': task-1354140, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.557446] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354139, 'name': CreateVM_Task, 'duration_secs': 0.565993} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.557446] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 865.558104] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.558342] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.558607] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 865.558864] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bcc1f288-f6b6-49fc-a236-6acbadd313ed {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.563531] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 865.563531] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52e8d3d1-cb5f-3ff0-e848-c52c5772d2d6" [ 865.563531] env[61629]: _type = "Task" [ 865.563531] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.571768] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52e8d3d1-cb5f-3ff0-e848-c52c5772d2d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.603704] env[61629]: DEBUG nova.network.neutron [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Successfully created port: f083b4ff-bb03-4d2c-90b7-524af188ccb0 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 865.612201] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354141, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067119} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.612201] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 865.612201] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ada1fd32-8fdb-4c99-abe0-9e907b2a8500 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.633781] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] 09890839-b1d9-4558-992d-b1a6f4c5f750/09890839-b1d9-4558-992d-b1a6f4c5f750.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 865.634103] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b97b52c-33bb-4db4-b94f-661336d2303f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.653950] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 865.653950] env[61629]: value = "task-1354142" [ 865.653950] env[61629]: _type = "Task" [ 865.653950] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.661968] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354142, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.666620] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "refresh_cache-d37958f8-7607-418b-9cfd-c3a5df721e94" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.666764] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquired lock "refresh_cache-d37958f8-7607-418b-9cfd-c3a5df721e94" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.666916] env[61629]: DEBUG nova.network.neutron [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 865.785904] env[61629]: DEBUG nova.compute.manager [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 865.814029] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Releasing lock "refresh_cache-dce0c7e1-1e47-49ad-88f7-f8f5e293d239" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.814029] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Updated the network info_cache for instance {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 865.814029] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 865.814029] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 865.814029] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 865.814029] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 865.814029] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task 
ComputeManager._poll_volume_usage {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 865.814029] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 865.814029] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61629) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 865.814029] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager.update_available_resource {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 865.822357] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a18c6d0c-c88f-4c83-ba92-12b7042dac17 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Releasing lock "refresh_cache-68c1e93a-2829-4764-a900-75c3479b4715" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.828025] env[61629]: DEBUG oslo_concurrency.lockutils [req-f6db6542-0dc9-45ff-936d-db3fbe8eb650 req-73658c86-c200-4b59-9c37-7256a3444138 service nova] Acquired lock "refresh_cache-68c1e93a-2829-4764-a900-75c3479b4715" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.828025] env[61629]: DEBUG nova.network.neutron [req-f6db6542-0dc9-45ff-936d-db3fbe8eb650 req-73658c86-c200-4b59-9c37-7256a3444138 service nova] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Refreshing network info cache for port 26b1c08a-ffa0-488a-ae0b-482ca395c8ad {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 865.828172] env[61629]: DEBUG nova.compute.manager [None req-a18c6d0c-c88f-4c83-ba92-12b7042dac17 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 865.829153] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da719d13-8bc9-4acb-b1c8-a18f4610bf1c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.900571] env[61629]: DEBUG oslo_concurrency.lockutils [req-7fed2275-a844-4c33-9082-74ff06f201bf req-dc7ae778-9ddc-4f4d-bc8c-3bca2a63d226 service nova] Releasing lock "refresh_cache-9c340ca1-75e0-4d65-8aae-0d5e11ff3e66" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.900833] env[61629]: DEBUG nova.compute.manager [req-7fed2275-a844-4c33-9082-74ff06f201bf req-dc7ae778-9ddc-4f4d-bc8c-3bca2a63d226 service nova] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Received event network-changed-7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 865.901019] env[61629]: DEBUG nova.compute.manager [req-7fed2275-a844-4c33-9082-74ff06f201bf req-dc7ae778-9ddc-4f4d-bc8c-3bca2a63d226 service nova] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] 
Refreshing instance network info cache due to event network-changed-7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 865.901236] env[61629]: DEBUG oslo_concurrency.lockutils [req-7fed2275-a844-4c33-9082-74ff06f201bf req-dc7ae778-9ddc-4f4d-bc8c-3bca2a63d226 service nova] Acquiring lock "refresh_cache-2b01eeae-64be-44b3-b4cf-c2a8490043e3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.901375] env[61629]: DEBUG oslo_concurrency.lockutils [req-7fed2275-a844-4c33-9082-74ff06f201bf req-dc7ae778-9ddc-4f4d-bc8c-3bca2a63d226 service nova] Acquired lock "refresh_cache-2b01eeae-64be-44b3-b4cf-c2a8490043e3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.901536] env[61629]: DEBUG nova.network.neutron [req-7fed2275-a844-4c33-9082-74ff06f201bf req-dc7ae778-9ddc-4f4d-bc8c-3bca2a63d226 service nova] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Refreshing network info cache for port 7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 865.995321] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Task: {'id': task-1354140, 'name': PowerOnVM_Task} progress is 90%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.078064] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52e8d3d1-cb5f-3ff0-e848-c52c5772d2d6, 'name': SearchDatastore_Task, 'duration_secs': 0.069481} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.078271] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.078543] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 866.078827] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.079275] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.079275] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 866.079483] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a8d8f1d9-b6b5-4ae3-b98e-2d7752a170a2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.092107] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 866.092299] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 866.093050] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-470936b5-be5e-4d87-86ed-67e2f0a862f5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.100433] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 866.100433] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52b3da3f-990b-5086-bf81-23477e64af87" [ 866.100433] env[61629]: _type = "Task" [ 866.100433] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.108438] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52b3da3f-990b-5086-bf81-23477e64af87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.111156] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba4826d-1c76-4b48-a2c7-e1ae2e822f36 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.117657] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-941da5c8-3d35-488c-b870-145d96fa8c7d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.148818] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-697519de-5734-477b-ba3b-b626a8a958d8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.159334] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0ea173-cf7f-48b5-b8f7-02df6f5f55a7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.167645] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354142, 'name': ReconfigVM_Task, 'duration_secs': 0.320161} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.175330] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Reconfigured VM instance instance-00000046 to attach disk [datastore1] 09890839-b1d9-4558-992d-b1a6f4c5f750/09890839-b1d9-4558-992d-b1a6f4c5f750.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 866.179302] env[61629]: DEBUG nova.compute.provider_tree [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.179503] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e67ed1e5-57be-4cd2-ac22-9032f8c5c3d7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.186234] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 866.186234] env[61629]: value = "task-1354143" [ 866.186234] env[61629]: _type = "Task" [ 866.186234] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.195044] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354143, 'name': Rename_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.222925] env[61629]: DEBUG nova.network.neutron [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 866.243361] env[61629]: DEBUG nova.compute.manager [req-6a343365-66fb-4d5a-852d-2d6ead56cabc req-351ea056-86e6-4955-be4d-2225d9faca48 service nova] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Received event network-vif-plugged-b0ccf912-7d97-4281-943f-c7ccdf8eec23 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 866.243361] env[61629]: DEBUG oslo_concurrency.lockutils [req-6a343365-66fb-4d5a-852d-2d6ead56cabc req-351ea056-86e6-4955-be4d-2225d9faca48 service nova] Acquiring lock "d37958f8-7607-418b-9cfd-c3a5df721e94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.243361] env[61629]: DEBUG oslo_concurrency.lockutils [req-6a343365-66fb-4d5a-852d-2d6ead56cabc req-351ea056-86e6-4955-be4d-2225d9faca48 service nova] Lock "d37958f8-7607-418b-9cfd-c3a5df721e94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.243361] env[61629]: DEBUG oslo_concurrency.lockutils [req-6a343365-66fb-4d5a-852d-2d6ead56cabc req-351ea056-86e6-4955-be4d-2225d9faca48 service nova] Lock "d37958f8-7607-418b-9cfd-c3a5df721e94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.243361] env[61629]: DEBUG nova.compute.manager [req-6a343365-66fb-4d5a-852d-2d6ead56cabc req-351ea056-86e6-4955-be4d-2225d9faca48 service nova] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] No waiting events found dispatching network-vif-plugged-b0ccf912-7d97-4281-943f-c7ccdf8eec23 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 866.243361] env[61629]: WARNING nova.compute.manager [req-6a343365-66fb-4d5a-852d-2d6ead56cabc req-351ea056-86e6-4955-be4d-2225d9faca48 service nova] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Received unexpected event network-vif-plugged-b0ccf912-7d97-4281-943f-c7ccdf8eec23 for instance with vm_state building and task_state spawning. [ 866.243361] env[61629]: DEBUG nova.compute.manager [req-6a343365-66fb-4d5a-852d-2d6ead56cabc req-351ea056-86e6-4955-be4d-2225d9faca48 service nova] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Received event network-changed-b0ccf912-7d97-4281-943f-c7ccdf8eec23 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 866.243612] env[61629]: DEBUG nova.compute.manager [req-6a343365-66fb-4d5a-852d-2d6ead56cabc req-351ea056-86e6-4955-be4d-2225d9faca48 service nova] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Refreshing instance network info cache due to event network-changed-b0ccf912-7d97-4281-943f-c7ccdf8eec23. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 866.243656] env[61629]: DEBUG oslo_concurrency.lockutils [req-6a343365-66fb-4d5a-852d-2d6ead56cabc req-351ea056-86e6-4955-be4d-2225d9faca48 service nova] Acquiring lock "refresh_cache-d37958f8-7607-418b-9cfd-c3a5df721e94" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.316441] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.495529] env[61629]: DEBUG oslo_vmware.api [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Task: {'id': task-1354140, 'name': PowerOnVM_Task, 'duration_secs': 1.159391} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.496498] env[61629]: DEBUG nova.network.neutron [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Updating instance_info_cache with network_info: [{"id": "b0ccf912-7d97-4281-943f-c7ccdf8eec23", "address": "fa:16:3e:2a:3e:3d", "network": {"id": "4e6a4470-c260-4226-9409-37a70cc1e8c1", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-817073300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6165d04bf0a468faaab339addeaa59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0ccf912-7d", "ovs_interfaceid": "b0ccf912-7d97-4281-943f-c7ccdf8eec23", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.497723] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 866.497958] env[61629]: INFO nova.compute.manager [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Took 9.39 seconds to spawn the instance on the hypervisor. 
[ 866.498154] env[61629]: DEBUG nova.compute.manager [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 866.501050] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3578a168-a4a1-4015-bdf8-675a1785ba45 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.618896] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52b3da3f-990b-5086-bf81-23477e64af87, 'name': SearchDatastore_Task, 'duration_secs': 0.018585} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.619775] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf8baa1c-a7c7-4883-a7e2-04ed457ba3fe {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.627435] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 866.627435] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52ef5c2e-037e-e76d-ecb5-b81b82c284ce" [ 866.627435] env[61629]: _type = "Task" [ 866.627435] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.635486] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52ef5c2e-037e-e76d-ecb5-b81b82c284ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.682848] env[61629]: DEBUG nova.scheduler.client.report [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 866.700788] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354143, 'name': Rename_Task, 'duration_secs': 0.177081} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.703094] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 866.703359] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-66005125-a3c2-43c3-a951-41f54dd79e90 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.712390] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 866.712390] env[61629]: value = "task-1354144" [ 866.712390] env[61629]: _type = "Task" [ 866.712390] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.727345] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354144, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.797727] env[61629]: DEBUG nova.compute.manager [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 866.826820] env[61629]: DEBUG nova.virt.hardware [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:56Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 866.827123] env[61629]: DEBUG nova.virt.hardware [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 866.827290] env[61629]: DEBUG nova.virt.hardware [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 866.827465] env[61629]: DEBUG nova.virt.hardware [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 866.827631] env[61629]: DEBUG nova.virt.hardware [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 866.827772] env[61629]: DEBUG nova.virt.hardware [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 866.827994] env[61629]: DEBUG nova.virt.hardware [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 866.828534] env[61629]: DEBUG nova.virt.hardware [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 866.828780] 
env[61629]: DEBUG nova.virt.hardware [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 866.828886] env[61629]: DEBUG nova.virt.hardware [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 866.829073] env[61629]: DEBUG nova.virt.hardware [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 866.830092] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-856ad9ad-de5b-42ad-a147-57997b24786d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.841302] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c038773b-efd5-4b1c-8de8-21e9e065a011 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.849027] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf89d99-9a14-4ab2-b858-3022784e913d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.868184] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-a18c6d0c-c88f-4c83-ba92-12b7042dac17 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Doing hard reboot of VM {{(pid=61629) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 866.868857] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-7391b4f4-298f-41c1-a7f9-3e7e7ef23c78 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.875479] env[61629]: DEBUG oslo_vmware.api [None req-a18c6d0c-c88f-4c83-ba92-12b7042dac17 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Waiting for the task: (returnval){ [ 866.875479] env[61629]: value = "task-1354145" [ 866.875479] env[61629]: _type = "Task" [ 866.875479] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.884846] env[61629]: DEBUG oslo_vmware.api [None req-a18c6d0c-c88f-4c83-ba92-12b7042dac17 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': task-1354145, 'name': ResetVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.944980] env[61629]: DEBUG nova.network.neutron [req-f6db6542-0dc9-45ff-936d-db3fbe8eb650 req-73658c86-c200-4b59-9c37-7256a3444138 service nova] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Updated VIF entry in instance network info cache for port 26b1c08a-ffa0-488a-ae0b-482ca395c8ad. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 866.945388] env[61629]: DEBUG nova.network.neutron [req-f6db6542-0dc9-45ff-936d-db3fbe8eb650 req-73658c86-c200-4b59-9c37-7256a3444138 service nova] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Updating instance_info_cache with network_info: [{"id": "26b1c08a-ffa0-488a-ae0b-482ca395c8ad", "address": "fa:16:3e:17:c6:e3", "network": {"id": "a7162f2a-5965-4793-b504-070d397c4652", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-205722911-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "27cc5f4c983a4a40aca3f207a6fed658", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eed34ae1-5f7f-4deb-9db8-85eaa1e60c29", "external-id": "nsx-vlan-transportzone-780", "segmentation_id": 780, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26b1c08a-ff", "ovs_interfaceid": "26b1c08a-ffa0-488a-ae0b-482ca395c8ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.001531] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Releasing lock "refresh_cache-d37958f8-7607-418b-9cfd-c3a5df721e94" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.001745] env[61629]: DEBUG nova.compute.manager [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Instance network_info: |[{"id": "b0ccf912-7d97-4281-943f-c7ccdf8eec23", "address": "fa:16:3e:2a:3e:3d", "network": {"id": "4e6a4470-c260-4226-9409-37a70cc1e8c1", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-817073300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6165d04bf0a468faaab339addeaa59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", 
"segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0ccf912-7d", "ovs_interfaceid": "b0ccf912-7d97-4281-943f-c7ccdf8eec23", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 867.002087] env[61629]: DEBUG oslo_concurrency.lockutils [req-6a343365-66fb-4d5a-852d-2d6ead56cabc req-351ea056-86e6-4955-be4d-2225d9faca48 service nova] Acquired lock "refresh_cache-d37958f8-7607-418b-9cfd-c3a5df721e94" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.002285] env[61629]: DEBUG nova.network.neutron [req-6a343365-66fb-4d5a-852d-2d6ead56cabc req-351ea056-86e6-4955-be4d-2225d9faca48 service nova] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Refreshing network info cache for port b0ccf912-7d97-4281-943f-c7ccdf8eec23 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 867.003700] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:3e:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc0e97b-b21d-4557-a4d4-fd7e8f973368', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b0ccf912-7d97-4281-943f-c7ccdf8eec23', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 867.012810] env[61629]: DEBUG oslo.service.loopingcall [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 867.014010] env[61629]: DEBUG nova.network.neutron [req-7fed2275-a844-4c33-9082-74ff06f201bf req-dc7ae778-9ddc-4f4d-bc8c-3bca2a63d226 service nova] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Updated VIF entry in instance network info cache for port 7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 867.014369] env[61629]: DEBUG nova.network.neutron [req-7fed2275-a844-4c33-9082-74ff06f201bf req-dc7ae778-9ddc-4f4d-bc8c-3bca2a63d226 service nova] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Updating instance_info_cache with network_info: [{"id": "7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3", "address": "fa:16:3e:c9:0f:8e", "network": {"id": "3cf05e01-9439-42dd-803f-fa703eb96988", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1374890316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b0b101e81dfe4c8b98314be278282c0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ee83b47-4c", "ovs_interfaceid": "7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.023040] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 867.024758] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-533d5be5-7340-4dab-a911-044d7c0fe34f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.041932] env[61629]: INFO nova.compute.manager [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Took 29.10 seconds to build instance. [ 867.050344] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 867.050344] env[61629]: value = "task-1354146" [ 867.050344] env[61629]: _type = "Task" [ 867.050344] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.059328] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354146, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.139202] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52ef5c2e-037e-e76d-ecb5-b81b82c284ce, 'name': SearchDatastore_Task, 'duration_secs': 0.029809} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.139435] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.139829] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66/9c340ca1-75e0-4d65-8aae-0d5e11ff3e66.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 867.140262] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97da8f62-750f-45b3-94be-17502c6a683b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.147732] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 867.147732] env[61629]: value = "task-1354147" [ 867.147732] env[61629]: _type = "Task" [ 867.147732] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.156927] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354147, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.189108] env[61629]: DEBUG oslo_concurrency.lockutils [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.189651] env[61629]: DEBUG nova.compute.manager [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 867.192854] env[61629]: DEBUG oslo_concurrency.lockutils [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.348s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.193216] env[61629]: DEBUG nova.objects.instance [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Lazy-loading 'resources' on Instance uuid dce0c7e1-1e47-49ad-88f7-f8f5e293d239 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 867.223275] env[61629]: DEBUG oslo_vmware.api [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354144, 'name': PowerOnVM_Task, 'duration_secs': 0.452776} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.223676] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 867.223889] env[61629]: INFO nova.compute.manager [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Took 7.74 seconds to spawn the instance on the hypervisor. [ 867.224080] env[61629]: DEBUG nova.compute.manager [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 867.224864] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c5cc133-4053-4e97-a470-36c720057262 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.276362] env[61629]: DEBUG nova.network.neutron [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Successfully updated port: f083b4ff-bb03-4d2c-90b7-524af188ccb0 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 867.386336] env[61629]: DEBUG oslo_vmware.api [None req-a18c6d0c-c88f-4c83-ba92-12b7042dac17 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': task-1354145, 'name': ResetVM_Task} progress is 100%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.449282] env[61629]: DEBUG oslo_concurrency.lockutils [req-f6db6542-0dc9-45ff-936d-db3fbe8eb650 req-73658c86-c200-4b59-9c37-7256a3444138 service nova] Releasing lock "refresh_cache-68c1e93a-2829-4764-a900-75c3479b4715" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.516516] env[61629]: DEBUG oslo_concurrency.lockutils [req-7fed2275-a844-4c33-9082-74ff06f201bf req-dc7ae778-9ddc-4f4d-bc8c-3bca2a63d226 service nova] Releasing lock "refresh_cache-2b01eeae-64be-44b3-b4cf-c2a8490043e3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.545786] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9313d172-b43b-482d-b688-7523045764a2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Lock "08cb71f4-2ebe-4694-856c-2e772f319cdf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.680s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.562089] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354146, 'name': CreateVM_Task} progress is 25%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.660238] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354147, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.694218] env[61629]: DEBUG nova.compute.utils [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 867.695612] env[61629]: DEBUG nova.compute.manager [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 867.696261] env[61629]: DEBUG nova.network.neutron [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 867.729017] env[61629]: DEBUG nova.network.neutron [req-6a343365-66fb-4d5a-852d-2d6ead56cabc req-351ea056-86e6-4955-be4d-2225d9faca48 service nova] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Updated VIF entry in instance network info cache for port b0ccf912-7d97-4281-943f-c7ccdf8eec23. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 867.730081] env[61629]: DEBUG nova.network.neutron [req-6a343365-66fb-4d5a-852d-2d6ead56cabc req-351ea056-86e6-4955-be4d-2225d9faca48 service nova] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Updating instance_info_cache with network_info: [{"id": "b0ccf912-7d97-4281-943f-c7ccdf8eec23", "address": "fa:16:3e:2a:3e:3d", "network": {"id": "4e6a4470-c260-4226-9409-37a70cc1e8c1", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-817073300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6165d04bf0a468faaab339addeaa59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0ccf912-7d", "ovs_interfaceid": "b0ccf912-7d97-4281-943f-c7ccdf8eec23", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.743245] env[61629]: DEBUG nova.policy [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dc766b0845b443a8a92346e5d032baca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87909880104e4519b42cb204f366af3f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 867.751659] env[61629]: INFO nova.compute.manager [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Took 25.64 seconds to build instance. 
[ 867.769770] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Acquiring lock "08cb71f4-2ebe-4694-856c-2e772f319cdf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.770042] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Lock "08cb71f4-2ebe-4694-856c-2e772f319cdf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.770415] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Acquiring lock "08cb71f4-2ebe-4694-856c-2e772f319cdf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.770601] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Lock "08cb71f4-2ebe-4694-856c-2e772f319cdf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.770778] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Lock "08cb71f4-2ebe-4694-856c-2e772f319cdf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.775208] env[61629]: INFO nova.compute.manager [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Terminating instance [ 867.778759] env[61629]: DEBUG nova.compute.manager [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 867.778979] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 867.780107] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5400c1a6-2a53-4f7d-a73a-d60390096ca3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.783296] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "refresh_cache-da1eb7f9-7562-40c8-955b-c11f831b7bc8" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.783428] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquired lock "refresh_cache-da1eb7f9-7562-40c8-955b-c11f831b7bc8" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.783593] env[61629]: DEBUG nova.network.neutron [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 867.793015] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 867.793693] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-def968dc-1768-4eee-898d-07fed3dc4254 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.801788] env[61629]: DEBUG oslo_vmware.api [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Waiting for the task: (returnval){ [ 867.801788] env[61629]: value = "task-1354148" [ 867.801788] env[61629]: _type = "Task" [ 867.801788] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.810163] env[61629]: DEBUG oslo_vmware.api [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Task: {'id': task-1354148, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.886408] env[61629]: DEBUG oslo_vmware.api [None req-a18c6d0c-c88f-4c83-ba92-12b7042dac17 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': task-1354145, 'name': ResetVM_Task} progress is 100%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.988644] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1422fe3-9cea-4c0b-817d-6e897a2ac626 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.996670] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085c3a31-845c-44f7-af22-b55a654b5dbe {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.034563] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc1441f2-ced5-4bb1-8897-561e5b8f904c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.039697] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af7011d-ead9-45db-b2af-4f4489f62ea3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.053068] env[61629]: DEBUG nova.compute.manager [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 868.055957] env[61629]: DEBUG nova.compute.provider_tree [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 868.057824] env[61629]: DEBUG nova.network.neutron [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Successfully created port: bff06c9b-54d2-4109-b2de-70fbab2c58d4 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 868.068550] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354146, 'name': CreateVM_Task} progress is 25%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.162799] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354147, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.200916] env[61629]: DEBUG nova.compute.manager [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 868.235134] env[61629]: DEBUG oslo_concurrency.lockutils [req-6a343365-66fb-4d5a-852d-2d6ead56cabc req-351ea056-86e6-4955-be4d-2225d9faca48 service nova] Releasing lock "refresh_cache-d37958f8-7607-418b-9cfd-c3a5df721e94" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.254279] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ee68df9e-953e-4dea-91e6-26836eaaf5e7 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "09890839-b1d9-4558-992d-b1a6f4c5f750" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.113s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.277591] env[61629]: DEBUG nova.compute.manager [req-3d88eba8-7d5b-4ebb-b12b-edffb1239d05 req-2ee575ef-d071-4a24-ba2f-a313af0c91e1 service nova] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Received event network-vif-plugged-f083b4ff-bb03-4d2c-90b7-524af188ccb0 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 868.278178] env[61629]: DEBUG oslo_concurrency.lockutils [req-3d88eba8-7d5b-4ebb-b12b-edffb1239d05 req-2ee575ef-d071-4a24-ba2f-a313af0c91e1 service nova] Acquiring lock "da1eb7f9-7562-40c8-955b-c11f831b7bc8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.278361] env[61629]: DEBUG oslo_concurrency.lockutils [req-3d88eba8-7d5b-4ebb-b12b-edffb1239d05 req-2ee575ef-d071-4a24-ba2f-a313af0c91e1 service nova] Lock "da1eb7f9-7562-40c8-955b-c11f831b7bc8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.279973] env[61629]: DEBUG oslo_concurrency.lockutils [req-3d88eba8-7d5b-4ebb-b12b-edffb1239d05 req-2ee575ef-d071-4a24-ba2f-a313af0c91e1 service nova] Lock "da1eb7f9-7562-40c8-955b-c11f831b7bc8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.279973] env[61629]: DEBUG nova.compute.manager [req-3d88eba8-7d5b-4ebb-b12b-edffb1239d05 req-2ee575ef-d071-4a24-ba2f-a313af0c91e1 service nova] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] No waiting events found dispatching network-vif-plugged-f083b4ff-bb03-4d2c-90b7-524af188ccb0 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 868.279973] env[61629]: WARNING nova.compute.manager [req-3d88eba8-7d5b-4ebb-b12b-edffb1239d05 req-2ee575ef-d071-4a24-ba2f-a313af0c91e1 service nova] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Received unexpected event network-vif-plugged-f083b4ff-bb03-4d2c-90b7-524af188ccb0 for instance with 
vm_state building and task_state spawning. [ 868.279973] env[61629]: DEBUG nova.compute.manager [req-3d88eba8-7d5b-4ebb-b12b-edffb1239d05 req-2ee575ef-d071-4a24-ba2f-a313af0c91e1 service nova] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Received event network-changed-f083b4ff-bb03-4d2c-90b7-524af188ccb0 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 868.279973] env[61629]: DEBUG nova.compute.manager [req-3d88eba8-7d5b-4ebb-b12b-edffb1239d05 req-2ee575ef-d071-4a24-ba2f-a313af0c91e1 service nova] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Refreshing instance network info cache due to event network-changed-f083b4ff-bb03-4d2c-90b7-524af188ccb0. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 868.279973] env[61629]: DEBUG oslo_concurrency.lockutils [req-3d88eba8-7d5b-4ebb-b12b-edffb1239d05 req-2ee575ef-d071-4a24-ba2f-a313af0c91e1 service nova] Acquiring lock "refresh_cache-da1eb7f9-7562-40c8-955b-c11f831b7bc8" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.318325] env[61629]: DEBUG oslo_vmware.api [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Task: {'id': task-1354148, 'name': PowerOffVM_Task, 'duration_secs': 0.393569} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.318325] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 868.318325] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 868.318325] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c0de9c3-3724-467c-9153-686a8b5d3ece {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.320088] env[61629]: DEBUG nova.network.neutron [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 868.387646] env[61629]: DEBUG oslo_vmware.api [None req-a18c6d0c-c88f-4c83-ba92-12b7042dac17 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': task-1354145, 'name': ResetVM_Task, 'duration_secs': 1.094388} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.387929] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-a18c6d0c-c88f-4c83-ba92-12b7042dac17 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Did hard reboot of VM {{(pid=61629) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 868.388163] env[61629]: DEBUG nova.compute.manager [None req-a18c6d0c-c88f-4c83-ba92-12b7042dac17 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 868.389012] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-706f15d8-054b-4145-9187-8041e7130247 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.403982] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 868.403982] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 868.403982] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Deleting the datastore file [datastore2] 08cb71f4-2ebe-4694-856c-2e772f319cdf {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 868.404213] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-873dc81f-91d2-477f-a140-e7c37c868f13 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.414330] env[61629]: DEBUG oslo_vmware.api [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Waiting for the task: (returnval){ [ 868.414330] env[61629]: value = "task-1354150" [ 868.414330] env[61629]: _type = "Task" [ 868.414330] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.426904] env[61629]: DEBUG oslo_vmware.api [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Task: {'id': task-1354150, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.538284] env[61629]: DEBUG nova.network.neutron [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Updating instance_info_cache with network_info: [{"id": "f083b4ff-bb03-4d2c-90b7-524af188ccb0", "address": "fa:16:3e:4f:57:69", "network": {"id": "4e6a4470-c260-4226-9409-37a70cc1e8c1", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-817073300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6165d04bf0a468faaab339addeaa59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf083b4ff-bb", "ovs_interfaceid": "f083b4ff-bb03-4d2c-90b7-524af188ccb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.569167] env[61629]: DEBUG nova.scheduler.client.report [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 868.573147] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354146, 'name': CreateVM_Task, 'duration_secs': 1.490403} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.575671] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 868.579018] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.579018] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.579018] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 868.579018] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddb25d43-55cf-4ab9-bf73-66f2c935a18f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.586098] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 868.586098] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5247eabb-9ecb-d9ad-14c8-dced9669152b" [ 868.586098] env[61629]: _type = "Task" [ 868.586098] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.587985] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.593715] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5247eabb-9ecb-d9ad-14c8-dced9669152b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.663501] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354147, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.308813} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.663501] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66/9c340ca1-75e0-4d65-8aae-0d5e11ff3e66.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 868.663722] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 868.663828] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-607e1fc4-86cb-4d4c-88e1-5a32e35430be {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.670996] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 868.670996] env[61629]: value = "task-1354151" [ 868.670996] env[61629]: _type = "Task" [ 868.670996] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.682887] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354151, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.756943] env[61629]: DEBUG nova.compute.manager [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 868.902474] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a18c6d0c-c88f-4c83-ba92-12b7042dac17 tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Lock "68c1e93a-2829-4764-a900-75c3479b4715" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 5.063s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.924961] env[61629]: DEBUG oslo_vmware.api [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Task: {'id': task-1354150, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211137} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.925333] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 868.925598] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 868.925864] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 868.926152] env[61629]: INFO nova.compute.manager [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Took 1.15 seconds to destroy the instance on the hypervisor. [ 868.926476] env[61629]: DEBUG oslo.service.loopingcall [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 868.926739] env[61629]: DEBUG nova.compute.manager [-] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 868.926896] env[61629]: DEBUG nova.network.neutron [-] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 869.044645] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Releasing lock "refresh_cache-da1eb7f9-7562-40c8-955b-c11f831b7bc8" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.044994] env[61629]: DEBUG nova.compute.manager [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Instance network_info: |[{"id": "f083b4ff-bb03-4d2c-90b7-524af188ccb0", "address": "fa:16:3e:4f:57:69", "network": {"id": "4e6a4470-c260-4226-9409-37a70cc1e8c1", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-817073300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6165d04bf0a468faaab339addeaa59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf083b4ff-bb", "ovs_interfaceid": "f083b4ff-bb03-4d2c-90b7-524af188ccb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 869.045365] env[61629]: DEBUG oslo_concurrency.lockutils [req-3d88eba8-7d5b-4ebb-b12b-edffb1239d05 req-2ee575ef-d071-4a24-ba2f-a313af0c91e1 service nova] Acquired lock "refresh_cache-da1eb7f9-7562-40c8-955b-c11f831b7bc8" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.045640] env[61629]: DEBUG nova.network.neutron [req-3d88eba8-7d5b-4ebb-b12b-edffb1239d05 req-2ee575ef-d071-4a24-ba2f-a313af0c91e1 service nova] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Refreshing network info cache for port f083b4ff-bb03-4d2c-90b7-524af188ccb0 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 869.047178] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:57:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'ccc0e97b-b21d-4557-a4d4-fd7e8f973368', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f083b4ff-bb03-4d2c-90b7-524af188ccb0', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 869.057075] env[61629]: DEBUG oslo.service.loopingcall [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 869.060086] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 869.060588] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a0228dd9-df4d-4c3b-928d-400221abf358 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.078040] env[61629]: DEBUG oslo_concurrency.lockutils [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.885s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.080194] env[61629]: DEBUG oslo_concurrency.lockutils [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.633s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.081687] env[61629]: INFO nova.compute.claims [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 869.091636] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 869.091636] env[61629]: value = "task-1354152" [ 869.091636] env[61629]: _type = "Task" [ 869.091636] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.102658] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5247eabb-9ecb-d9ad-14c8-dced9669152b, 'name': SearchDatastore_Task, 'duration_secs': 0.010134} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.107707] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.107967] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 869.108305] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.108463] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.108806] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 869.109302] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354152, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.109542] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c095161-e62b-4b1a-a090-c435328ec7c0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.113196] env[61629]: INFO nova.scheduler.client.report [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Deleted allocations for instance dce0c7e1-1e47-49ad-88f7-f8f5e293d239 [ 869.126369] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 869.126549] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 869.127358] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-908344ed-5361-4bf6-9dfc-ced56084d3ca {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.133627] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 869.133627] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52c16a6b-219d-37a3-6483-51bd6e28fed7" [ 869.133627] env[61629]: _type = "Task" [ 869.133627] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.141393] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52c16a6b-219d-37a3-6483-51bd6e28fed7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.181309] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354151, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076501} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.182173] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 869.182954] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc72ce0-6b7c-48bc-8a28-36a5879f41de {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.205355] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66/9c340ca1-75e0-4d65-8aae-0d5e11ff3e66.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 869.207964] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ed3df09-38b1-40ff-a3c6-d45ce7d3c413 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.223036] env[61629]: DEBUG nova.compute.manager [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 869.231400] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 869.231400] env[61629]: value = "task-1354153" [ 869.231400] env[61629]: _type = "Task" [ 869.231400] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.242055] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354153, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.253802] env[61629]: DEBUG nova.virt.hardware [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 869.254095] env[61629]: DEBUG nova.virt.hardware [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 869.254219] env[61629]: DEBUG nova.virt.hardware [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 869.254407] env[61629]: DEBUG nova.virt.hardware [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 869.254526] env[61629]: DEBUG nova.virt.hardware [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 869.254730] env[61629]: DEBUG nova.virt.hardware [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 869.254992] env[61629]: DEBUG nova.virt.hardware [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 869.255187] env[61629]: DEBUG nova.virt.hardware [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 869.255359] env[61629]: DEBUG nova.virt.hardware [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 869.255523] env[61629]: DEBUG nova.virt.hardware [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 869.255714] env[61629]: DEBUG nova.virt.hardware [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 869.256700] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d60b01b7-98d0-4483-9053-b3b3b9c72cef {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.267841] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c6e05db-6c34-4427-a4f6-f6989b2717a0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.285023] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.413336] env[61629]: DEBUG nova.network.neutron [req-3d88eba8-7d5b-4ebb-b12b-edffb1239d05 req-2ee575ef-d071-4a24-ba2f-a313af0c91e1 service nova] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Updated VIF entry in instance network info cache for port f083b4ff-bb03-4d2c-90b7-524af188ccb0. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 869.413709] env[61629]: DEBUG nova.network.neutron [req-3d88eba8-7d5b-4ebb-b12b-edffb1239d05 req-2ee575ef-d071-4a24-ba2f-a313af0c91e1 service nova] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Updating instance_info_cache with network_info: [{"id": "f083b4ff-bb03-4d2c-90b7-524af188ccb0", "address": "fa:16:3e:4f:57:69", "network": {"id": "4e6a4470-c260-4226-9409-37a70cc1e8c1", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-817073300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6165d04bf0a468faaab339addeaa59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf083b4ff-bb", "ovs_interfaceid": "f083b4ff-bb03-4d2c-90b7-524af188ccb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.602928] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354152, 'name': CreateVM_Task} progress is 25%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.629218] env[61629]: DEBUG oslo_concurrency.lockutils [None req-224f8835-b020-4909-b2cd-66c794d887ac tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Lock "dce0c7e1-1e47-49ad-88f7-f8f5e293d239" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.747s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.645500] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52c16a6b-219d-37a3-6483-51bd6e28fed7, 'name': SearchDatastore_Task, 'duration_secs': 0.025584} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.647562] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33faf296-e167-4c9d-b6a5-4083ab53727f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.652890] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 869.652890] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52275284-7e2a-717b-e1c2-8c3563844ede" [ 869.652890] env[61629]: _type = "Task" [ 869.652890] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.663436] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52275284-7e2a-717b-e1c2-8c3563844ede, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.725818] env[61629]: DEBUG nova.compute.manager [req-f5da702d-b113-47dc-afe5-2a68db2af057 req-cef1aadb-4c70-4c7f-b1b4-9c8856bfdbca service nova] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Received event network-vif-plugged-bff06c9b-54d2-4109-b2de-70fbab2c58d4 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 869.726065] env[61629]: DEBUG oslo_concurrency.lockutils [req-f5da702d-b113-47dc-afe5-2a68db2af057 req-cef1aadb-4c70-4c7f-b1b4-9c8856bfdbca service nova] Acquiring lock "87a1383f-d66b-4bde-b153-89ac62ff8390-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.726296] env[61629]: DEBUG oslo_concurrency.lockutils [req-f5da702d-b113-47dc-afe5-2a68db2af057 req-cef1aadb-4c70-4c7f-b1b4-9c8856bfdbca service nova] Lock "87a1383f-d66b-4bde-b153-89ac62ff8390-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.726620] env[61629]: DEBUG oslo_concurrency.lockutils [req-f5da702d-b113-47dc-afe5-2a68db2af057 req-cef1aadb-4c70-4c7f-b1b4-9c8856bfdbca service nova] Lock "87a1383f-d66b-4bde-b153-89ac62ff8390-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.726620] env[61629]: DEBUG nova.compute.manager [req-f5da702d-b113-47dc-afe5-2a68db2af057 req-cef1aadb-4c70-4c7f-b1b4-9c8856bfdbca service nova] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] No waiting events found dispatching network-vif-plugged-bff06c9b-54d2-4109-b2de-70fbab2c58d4 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 869.726791] env[61629]: WARNING nova.compute.manager [req-f5da702d-b113-47dc-afe5-2a68db2af057 req-cef1aadb-4c70-4c7f-b1b4-9c8856bfdbca service nova] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Received unexpected event network-vif-plugged-bff06c9b-54d2-4109-b2de-70fbab2c58d4 for instance with vm_state building and task_state spawning. [ 869.744028] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354153, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.754490] env[61629]: DEBUG nova.network.neutron [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Successfully updated port: bff06c9b-54d2-4109-b2de-70fbab2c58d4 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 869.893141] env[61629]: DEBUG nova.network.neutron [-] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.916479] env[61629]: DEBUG oslo_concurrency.lockutils [req-3d88eba8-7d5b-4ebb-b12b-edffb1239d05 req-2ee575ef-d071-4a24-ba2f-a313af0c91e1 service nova] Releasing lock "refresh_cache-da1eb7f9-7562-40c8-955b-c11f831b7bc8" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.105610] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354152, 'name': CreateVM_Task, 'duration_secs': 0.845196} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.106346] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 870.106635] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.107908] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.107908] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 870.107908] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0cdcc05-1de9-4eb9-8e79-c8e7673372f7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.112530] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 870.112530] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52b5a6cc-7291-98b1-0ea0-624b322ff7fb" [ 870.112530] env[61629]: _type = "Task" [ 870.112530] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.123908] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52b5a6cc-7291-98b1-0ea0-624b322ff7fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.164187] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52275284-7e2a-717b-e1c2-8c3563844ede, 'name': SearchDatastore_Task, 'duration_secs': 0.046291} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.168218] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.168850] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] d37958f8-7607-418b-9cfd-c3a5df721e94/d37958f8-7607-418b-9cfd-c3a5df721e94.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 870.169803] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4fb521b0-513e-406d-ac32-76ed06be22c9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.182975] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 870.182975] env[61629]: value = "task-1354154" [ 870.182975] env[61629]: _type = "Task" [ 870.182975] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.194446] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354154, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.246749] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354153, 'name': ReconfigVM_Task, 'duration_secs': 0.845118} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.246749] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66/9c340ca1-75e0-4d65-8aae-0d5e11ff3e66.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 870.247389] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-36701b84-b463-415c-8ca0-3dd9379da7d8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.258277] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 870.258277] env[61629]: value = "task-1354155" [ 870.258277] env[61629]: _type = "Task" [ 870.258277] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.262147] env[61629]: DEBUG oslo_concurrency.lockutils [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "refresh_cache-87a1383f-d66b-4bde-b153-89ac62ff8390" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.262147] env[61629]: DEBUG oslo_concurrency.lockutils [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquired lock "refresh_cache-87a1383f-d66b-4bde-b153-89ac62ff8390" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.262147] env[61629]: DEBUG nova.network.neutron [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 870.268530] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354155, 'name': Rename_Task} progress is 10%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.342354] env[61629]: DEBUG nova.compute.manager [req-e9cc375e-8f2a-45f9-98e0-752e5266a09c req-e2346914-c40c-4368-a55a-c6e160fd9a1e service nova] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Received event network-vif-deleted-51940ecf-0cf7-40a7-ad25-0aab2c24a535 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 870.342631] env[61629]: DEBUG nova.compute.manager [req-e9cc375e-8f2a-45f9-98e0-752e5266a09c req-e2346914-c40c-4368-a55a-c6e160fd9a1e service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Received event network-changed-91aa1640-3097-4a26-9090-4081740f917d {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 870.342787] env[61629]: DEBUG nova.compute.manager [req-e9cc375e-8f2a-45f9-98e0-752e5266a09c req-e2346914-c40c-4368-a55a-c6e160fd9a1e service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Refreshing instance network info cache due to event network-changed-91aa1640-3097-4a26-9090-4081740f917d. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 870.342950] env[61629]: DEBUG oslo_concurrency.lockutils [req-e9cc375e-8f2a-45f9-98e0-752e5266a09c req-e2346914-c40c-4368-a55a-c6e160fd9a1e service nova] Acquiring lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.343098] env[61629]: DEBUG oslo_concurrency.lockutils [req-e9cc375e-8f2a-45f9-98e0-752e5266a09c req-e2346914-c40c-4368-a55a-c6e160fd9a1e service nova] Acquired lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.343294] env[61629]: DEBUG nova.network.neutron [req-e9cc375e-8f2a-45f9-98e0-752e5266a09c req-e2346914-c40c-4368-a55a-c6e160fd9a1e service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Refreshing network info cache for port 91aa1640-3097-4a26-9090-4081740f917d {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 870.396282] env[61629]: INFO nova.compute.manager [-] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Took 1.47 seconds to deallocate network for instance. 
[ 870.398044] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c763777d-31a9-4b15-9207-210a59b3ec1f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.408233] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b085e529-b864-4daa-8fb8-1a5eabe92b5b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.441087] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c628f8c6-5ff2-4c70-9339-a7ea4ae819a6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.449892] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4dabfc9-4568-4f45-95f1-fe196875d7d8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.465103] env[61629]: DEBUG nova.compute.provider_tree [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 870.513805] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "7c3e9d0f-88a8-41fe-bf61-e3db34d36928" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.514107] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "7c3e9d0f-88a8-41fe-bf61-e3db34d36928" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.625863] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52b5a6cc-7291-98b1-0ea0-624b322ff7fb, 'name': SearchDatastore_Task, 'duration_secs': 0.014556} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.626205] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.626436] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 870.626662] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.626870] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.627099] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 870.627360] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6afebf7e-82c8-4dc3-8c37-338490c2758a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.636804] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 870.637079] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 870.638186] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ec43149-2d31-4397-a420-042226e8f0bb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.643722] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 870.643722] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]523d6b71-c1ed-9ff7-629c-34cbcad4f2f7" [ 870.643722] env[61629]: _type = "Task" [ 870.643722] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.652088] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Acquiring lock "68c1e93a-2829-4764-a900-75c3479b4715" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.652980] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Lock "68c1e93a-2829-4764-a900-75c3479b4715" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.652980] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Acquiring lock "68c1e93a-2829-4764-a900-75c3479b4715-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.652980] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Lock "68c1e93a-2829-4764-a900-75c3479b4715-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.652980] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Lock "68c1e93a-2829-4764-a900-75c3479b4715-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.654864] env[61629]: INFO nova.compute.manager [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Terminating instance [ 870.656974] env[61629]: DEBUG nova.compute.manager [None 
req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 870.656974] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 870.657671] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]523d6b71-c1ed-9ff7-629c-34cbcad4f2f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.658503] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818300af-1d72-408f-b35f-562ee2486bd5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.665896] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 870.666476] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-93823e00-7b69-49ad-a4a1-13a315ff30dc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.672591] env[61629]: DEBUG oslo_vmware.api [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Waiting for the task: (returnval){ [ 870.672591] env[61629]: value = "task-1354156" [ 870.672591] env[61629]: _type = "Task" [ 870.672591] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.684438] env[61629]: DEBUG oslo_vmware.api [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': task-1354156, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.694391] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354154, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.769829] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354155, 'name': Rename_Task, 'duration_secs': 0.307131} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.770203] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 870.770479] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83899613-7dd8-4faa-ae68-c52c68dfa395 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.783637] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 870.783637] env[61629]: value = "task-1354157" [ 870.783637] env[61629]: _type = "Task" [ 870.783637] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.798350] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354157, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.821011] env[61629]: DEBUG nova.network.neutron [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 870.905370] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.968333] env[61629]: DEBUG nova.scheduler.client.report [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 871.154073] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]523d6b71-c1ed-9ff7-629c-34cbcad4f2f7, 'name': SearchDatastore_Task, 'duration_secs': 0.018968} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.154971] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-552eae65-9ddf-4882-b2ea-90f92eb2f345 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.160753] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 871.160753] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52f55359-ce3c-61c5-e636-c6079d20c57d" [ 871.160753] env[61629]: _type = "Task" [ 871.160753] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.169495] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52f55359-ce3c-61c5-e636-c6079d20c57d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.182422] env[61629]: DEBUG oslo_vmware.api [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': task-1354156, 'name': PowerOffVM_Task, 'duration_secs': 0.46038} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.182711] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 871.182884] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 871.183169] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d94da6a1-5e04-4690-8aca-b311e25292e8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.197353] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354154, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.811512} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.197616] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] d37958f8-7607-418b-9cfd-c3a5df721e94/d37958f8-7607-418b-9cfd-c3a5df721e94.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 871.197828] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 871.198275] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2a1d09c2-e728-4f56-98f2-36cf898279dc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.204938] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 871.204938] env[61629]: value = "task-1354159" [ 871.204938] env[61629]: _type = "Task" [ 871.204938] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.213524] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354159, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.221350] env[61629]: DEBUG nova.network.neutron [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Updating instance_info_cache with network_info: [{"id": "bff06c9b-54d2-4109-b2de-70fbab2c58d4", "address": "fa:16:3e:7c:c6:f7", "network": {"id": "a1fb78c4-7c5c-4692-86e0-3111b87b44c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1355821875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87909880104e4519b42cb204f366af3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbff06c9b-54", "ovs_interfaceid": "bff06c9b-54d2-4109-b2de-70fbab2c58d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.252390] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 871.252708] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 871.252945] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Deleting the datastore file [datastore1] 68c1e93a-2829-4764-a900-75c3479b4715 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 871.253241] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ddb3c1e2-8099-461c-ac05-96b643cd8b31 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.261364] env[61629]: DEBUG oslo_vmware.api [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Waiting for the task: (returnval){ [ 871.261364] env[61629]: value = "task-1354160" [ 871.261364] env[61629]: _type = "Task" [ 871.261364] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.272639] env[61629]: DEBUG oslo_vmware.api [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': task-1354160, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.293976] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354157, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.392288] env[61629]: DEBUG nova.network.neutron [req-e9cc375e-8f2a-45f9-98e0-752e5266a09c req-e2346914-c40c-4368-a55a-c6e160fd9a1e service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Updated VIF entry in instance network info cache for port 91aa1640-3097-4a26-9090-4081740f917d. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 871.394772] env[61629]: DEBUG nova.network.neutron [req-e9cc375e-8f2a-45f9-98e0-752e5266a09c req-e2346914-c40c-4368-a55a-c6e160fd9a1e service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Updating instance_info_cache with network_info: [{"id": "91aa1640-3097-4a26-9090-4081740f917d", "address": "fa:16:3e:d4:a8:15", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91aa1640-30", "ovs_interfaceid": "91aa1640-3097-4a26-9090-4081740f917d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.473471] env[61629]: DEBUG oslo_concurrency.lockutils [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.393s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.474025] env[61629]: DEBUG nova.compute.manager [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Start building networks 
asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 871.476954] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.911s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.481020] env[61629]: DEBUG nova.objects.instance [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Lazy-loading 'resources' on Instance uuid edb4e0f6-57ad-48cf-aa20-3b2549bff3fe {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 871.671808] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52f55359-ce3c-61c5-e636-c6079d20c57d, 'name': SearchDatastore_Task, 'duration_secs': 0.009987} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.671808] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.672096] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] da1eb7f9-7562-40c8-955b-c11f831b7bc8/da1eb7f9-7562-40c8-955b-c11f831b7bc8.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 871.672214] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4bc0d40d-6837-4a66-93e0-813c0d119e0b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.678733] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 871.678733] env[61629]: value = "task-1354161" [ 871.678733] env[61629]: _type = "Task" [ 871.678733] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.686201] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354161, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.712986] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354159, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.162983} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.713259] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 871.714035] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f836a561-21a4-4c58-9184-011c192d0acd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.727087] env[61629]: DEBUG oslo_concurrency.lockutils [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Releasing lock "refresh_cache-87a1383f-d66b-4bde-b153-89ac62ff8390" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.727399] env[61629]: DEBUG nova.compute.manager [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Instance network_info: |[{"id": "bff06c9b-54d2-4109-b2de-70fbab2c58d4", "address": "fa:16:3e:7c:c6:f7", "network": {"id": "a1fb78c4-7c5c-4692-86e0-3111b87b44c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1355821875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87909880104e4519b42cb204f366af3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbff06c9b-54", "ovs_interfaceid": "bff06c9b-54d2-4109-b2de-70fbab2c58d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 871.736246] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] d37958f8-7607-418b-9cfd-c3a5df721e94/d37958f8-7607-418b-9cfd-c3a5df721e94.vmdk or device None with type sparse 
{{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 871.736652] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7c:c6:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c492f5cc-7ae0-4cab-823c-0d5dd8c60b26', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bff06c9b-54d2-4109-b2de-70fbab2c58d4', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 871.743701] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Creating folder: Project (87909880104e4519b42cb204f366af3f). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 871.743939] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f995cead-6134-4a6c-bf91-c1d6b42da261 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.758826] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dca637d0-91e0-4903-95ad-b2a5face9fac {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.761461] env[61629]: DEBUG nova.compute.manager [req-bef2324c-b262-4c25-b606-1ca6991a79ff req-0d78d94a-4f34-48ab-8c06-3129e824363a service nova] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Received event network-changed-bff06c9b-54d2-4109-b2de-70fbab2c58d4 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 871.761642] env[61629]: DEBUG nova.compute.manager [req-bef2324c-b262-4c25-b606-1ca6991a79ff req-0d78d94a-4f34-48ab-8c06-3129e824363a service nova] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Refreshing instance network info cache due to event network-changed-bff06c9b-54d2-4109-b2de-70fbab2c58d4. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 871.761850] env[61629]: DEBUG oslo_concurrency.lockutils [req-bef2324c-b262-4c25-b606-1ca6991a79ff req-0d78d94a-4f34-48ab-8c06-3129e824363a service nova] Acquiring lock "refresh_cache-87a1383f-d66b-4bde-b153-89ac62ff8390" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.761988] env[61629]: DEBUG oslo_concurrency.lockutils [req-bef2324c-b262-4c25-b606-1ca6991a79ff req-0d78d94a-4f34-48ab-8c06-3129e824363a service nova] Acquired lock "refresh_cache-87a1383f-d66b-4bde-b153-89ac62ff8390" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.762165] env[61629]: DEBUG nova.network.neutron [req-bef2324c-b262-4c25-b606-1ca6991a79ff req-0d78d94a-4f34-48ab-8c06-3129e824363a service nova] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Refreshing network info cache for port bff06c9b-54d2-4109-b2de-70fbab2c58d4 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 871.769441] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 871.769441] env[61629]: value = "task-1354163" [ 871.769441] env[61629]: _type = "Task" [ 871.769441] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.773100] env[61629]: DEBUG oslo_vmware.api [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Task: {'id': task-1354160, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256856} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.777272] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 871.777474] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 871.777652] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 871.777850] env[61629]: INFO nova.compute.manager [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 871.778148] env[61629]: DEBUG oslo.service.loopingcall [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 871.778369] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Created folder: Project (87909880104e4519b42cb204f366af3f) in parent group-v288443. [ 871.778529] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Creating folder: Instances. Parent ref: group-v288501. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 871.778970] env[61629]: DEBUG nova.compute.manager [-] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 871.779085] env[61629]: DEBUG nova.network.neutron [-] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 871.780517] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-52a7db56-0082-4497-9d7d-0f433fb61197 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.789767] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354163, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.793667] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Created folder: Instances in parent group-v288501. [ 871.793974] env[61629]: DEBUG oslo.service.loopingcall [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 871.794591] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 871.794868] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-474e84f5-9819-47f0-9673-61d6cba8ffb4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.815310] env[61629]: DEBUG oslo_vmware.api [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354157, 'name': PowerOnVM_Task, 'duration_secs': 0.709886} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.818007] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 871.818298] env[61629]: INFO nova.compute.manager [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Took 9.89 seconds to spawn the instance on the hypervisor. [ 871.818526] env[61629]: DEBUG nova.compute.manager [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 871.819310] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b298391e-d1b6-4af5-8283-94b6e3bd18d7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.822889] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 871.822889] env[61629]: value = "task-1354165" [ 871.822889] env[61629]: _type = "Task" [ 871.822889] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.834775] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354165, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.896495] env[61629]: DEBUG oslo_concurrency.lockutils [req-e9cc375e-8f2a-45f9-98e0-752e5266a09c req-e2346914-c40c-4368-a55a-c6e160fd9a1e service nova] Releasing lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.896629] env[61629]: DEBUG nova.compute.manager [req-e9cc375e-8f2a-45f9-98e0-752e5266a09c req-e2346914-c40c-4368-a55a-c6e160fd9a1e service nova] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Received event network-changed-26b1c08a-ffa0-488a-ae0b-482ca395c8ad {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 871.896879] env[61629]: DEBUG nova.compute.manager [req-e9cc375e-8f2a-45f9-98e0-752e5266a09c req-e2346914-c40c-4368-a55a-c6e160fd9a1e service nova] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Refreshing instance network info cache due to event network-changed-26b1c08a-ffa0-488a-ae0b-482ca395c8ad. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 871.897277] env[61629]: DEBUG oslo_concurrency.lockutils [req-e9cc375e-8f2a-45f9-98e0-752e5266a09c req-e2346914-c40c-4368-a55a-c6e160fd9a1e service nova] Acquiring lock "refresh_cache-68c1e93a-2829-4764-a900-75c3479b4715" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.897554] env[61629]: DEBUG oslo_concurrency.lockutils [req-e9cc375e-8f2a-45f9-98e0-752e5266a09c req-e2346914-c40c-4368-a55a-c6e160fd9a1e service nova] Acquired lock "refresh_cache-68c1e93a-2829-4764-a900-75c3479b4715" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.897871] env[61629]: DEBUG nova.network.neutron [req-e9cc375e-8f2a-45f9-98e0-752e5266a09c req-e2346914-c40c-4368-a55a-c6e160fd9a1e service nova] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Refreshing network info cache for port 26b1c08a-ffa0-488a-ae0b-482ca395c8ad {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 871.980771] env[61629]: DEBUG nova.compute.utils [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 871.982500] env[61629]: DEBUG nova.compute.manager [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 871.982620] env[61629]: DEBUG nova.network.neutron [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 872.040266] env[61629]: DEBUG nova.policy [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dc766b0845b443a8a92346e5d032baca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87909880104e4519b42cb204f366af3f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 872.191963] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354161, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.285722] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354163, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.317441] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20579033-ba5b-4e0b-9033-4b5e14fe0fb0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.340182] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8b7744-ce71-4f95-8c43-28d0999fe8d5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.345029] env[61629]: INFO nova.compute.manager [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Took 25.91 seconds to build instance. [ 872.350499] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354165, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.381561] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea426731-339c-4632-89bf-17699e66412a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.391389] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae27b847-77b7-44d1-822f-2da0b079dab6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.410752] env[61629]: DEBUG nova.compute.provider_tree [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 872.436345] env[61629]: INFO nova.network.neutron [req-e9cc375e-8f2a-45f9-98e0-752e5266a09c req-e2346914-c40c-4368-a55a-c6e160fd9a1e service nova] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Port 26b1c08a-ffa0-488a-ae0b-482ca395c8ad from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 872.436588] env[61629]: DEBUG nova.network.neutron [req-e9cc375e-8f2a-45f9-98e0-752e5266a09c req-e2346914-c40c-4368-a55a-c6e160fd9a1e service nova] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.488510] env[61629]: DEBUG nova.compute.manager [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 872.495376] env[61629]: DEBUG nova.network.neutron [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Successfully created port: 91e4e033-337e-4a36-a5a7-a54b29cc6531 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 872.602082] env[61629]: DEBUG nova.network.neutron [-] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.691441] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354161, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.79739} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.691728] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] da1eb7f9-7562-40c8-955b-c11f831b7bc8/da1eb7f9-7562-40c8-955b-c11f831b7bc8.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 872.691916] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 872.692184] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d6b2e660-7a00-45bd-bb74-fb970e44354a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.698222] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 872.698222] env[61629]: value = "task-1354166" [ 872.698222] env[61629]: _type = "Task" [ 872.698222] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.706097] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354166, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.736654] env[61629]: DEBUG nova.network.neutron [req-bef2324c-b262-4c25-b606-1ca6991a79ff req-0d78d94a-4f34-48ab-8c06-3129e824363a service nova] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Updated VIF entry in instance network info cache for port bff06c9b-54d2-4109-b2de-70fbab2c58d4. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 872.736745] env[61629]: DEBUG nova.network.neutron [req-bef2324c-b262-4c25-b606-1ca6991a79ff req-0d78d94a-4f34-48ab-8c06-3129e824363a service nova] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Updating instance_info_cache with network_info: [{"id": "bff06c9b-54d2-4109-b2de-70fbab2c58d4", "address": "fa:16:3e:7c:c6:f7", "network": {"id": "a1fb78c4-7c5c-4692-86e0-3111b87b44c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1355821875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87909880104e4519b42cb204f366af3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbff06c9b-54", "ovs_interfaceid": "bff06c9b-54d2-4109-b2de-70fbab2c58d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.785396] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354163, 'name': ReconfigVM_Task, 'duration_secs': 0.844291} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.785681] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Reconfigured VM instance instance-00000048 to attach disk [datastore1] d37958f8-7607-418b-9cfd-c3a5df721e94/d37958f8-7607-418b-9cfd-c3a5df721e94.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 872.786332] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-18487822-27e1-4372-bc6e-7e6e4033c4a3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.792398] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 872.792398] env[61629]: value = "task-1354167" [ 872.792398] env[61629]: _type = "Task" [ 872.792398] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.801687] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354167, 'name': Rename_Task} progress is 5%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.837659] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354165, 'name': CreateVM_Task, 'duration_secs': 0.601706} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.837878] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 872.838690] env[61629]: DEBUG oslo_concurrency.lockutils [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.838857] env[61629]: DEBUG oslo_concurrency.lockutils [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.839218] env[61629]: DEBUG oslo_concurrency.lockutils [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 872.839535] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-962e7fa9-7efc-4030-96f8-9269eca07737 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.845140] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 872.845140] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52645726-d207-51c5-7ce7-bec81bd277ba" [ 872.845140] env[61629]: _type = "Task" [ 872.845140] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.853898] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4d62bde6-a027-423d-9f2f-ef6df4089515 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "9c340ca1-75e0-4d65-8aae-0d5e11ff3e66" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.162s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.854210] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52645726-d207-51c5-7ce7-bec81bd277ba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.860059] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquiring lock "3085a70f-360c-43a3-80d7-e7b87fb3e146" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.860373] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Lock "3085a70f-360c-43a3-80d7-e7b87fb3e146" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.913997] env[61629]: DEBUG nova.scheduler.client.report [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 872.939261] env[61629]: DEBUG oslo_concurrency.lockutils [req-e9cc375e-8f2a-45f9-98e0-752e5266a09c req-e2346914-c40c-4368-a55a-c6e160fd9a1e service nova] Releasing lock "refresh_cache-68c1e93a-2829-4764-a900-75c3479b4715" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.105099] env[61629]: INFO nova.compute.manager [-] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Took 1.33 seconds to deallocate network for instance. [ 873.208588] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354166, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.14793} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.208782] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 873.209596] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c219cbd9-36f1-4c69-9597-f7eec1faceb8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.231432] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] da1eb7f9-7562-40c8-955b-c11f831b7bc8/da1eb7f9-7562-40c8-955b-c11f831b7bc8.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 873.231719] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-864fa10d-a15d-4048-ad40-249880157e01 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.245921] env[61629]: DEBUG oslo_concurrency.lockutils [req-bef2324c-b262-4c25-b606-1ca6991a79ff req-0d78d94a-4f34-48ab-8c06-3129e824363a service nova] Releasing lock "refresh_cache-87a1383f-d66b-4bde-b153-89ac62ff8390" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.252257] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 873.252257] env[61629]: value = "task-1354168" [ 873.252257] env[61629]: _type = "Task" [ 873.252257] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.260252] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354168, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.303399] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354167, 'name': Rename_Task} progress is 99%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.355960] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52645726-d207-51c5-7ce7-bec81bd277ba, 'name': SearchDatastore_Task, 'duration_secs': 0.009435} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.355960] env[61629]: DEBUG oslo_concurrency.lockutils [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.356190] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 873.356451] env[61629]: DEBUG oslo_concurrency.lockutils [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.356548] env[61629]: DEBUG oslo_concurrency.lockutils [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.356724] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 873.357176] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a5cd5a44-163b-48d2-802c-0b99bce396f6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.361067] env[61629]: DEBUG nova.compute.manager [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 873.377040] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 873.377237] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 873.378102] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83ad7e15-f67e-4e56-9376-f2b257da7f5f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.383817] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 873.383817] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]526bb3d3-fa0d-905c-73ee-bf547982a71b" [ 873.383817] env[61629]: _type = "Task" [ 873.383817] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.391289] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]526bb3d3-fa0d-905c-73ee-bf547982a71b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.418629] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.942s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.421770] env[61629]: DEBUG oslo_concurrency.lockutils [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.046s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.425057] env[61629]: INFO nova.compute.claims [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 873.443544] env[61629]: INFO nova.scheduler.client.report [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Deleted allocations for instance edb4e0f6-57ad-48cf-aa20-3b2549bff3fe [ 873.497552] env[61629]: DEBUG nova.compute.manager [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 873.523393] env[61629]: DEBUG nova.virt.hardware [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 873.523602] env[61629]: DEBUG nova.virt.hardware [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 873.523653] env[61629]: DEBUG nova.virt.hardware [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 873.523950] env[61629]: DEBUG nova.virt.hardware [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 873.524125] env[61629]: DEBUG nova.virt.hardware [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 873.524276] env[61629]: DEBUG nova.virt.hardware [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 873.524487] env[61629]: DEBUG nova.virt.hardware [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 873.524627] env[61629]: DEBUG nova.virt.hardware [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 873.524794] env[61629]: DEBUG nova.virt.hardware [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 873.524944] env[61629]: DEBUG nova.virt.hardware [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 873.525132] env[61629]: DEBUG nova.virt.hardware [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 873.526179] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113ed191-11eb-47e6-a619-c2591916cac6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.535197] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc6a9d7-eed9-46f6-b0db-34ae572a071f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.611538] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.763662] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354168, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.793532] env[61629]: DEBUG nova.compute.manager [req-c081a20e-1573-4f63-b126-3aa13be129fb req-038c21e3-89cc-41f9-9c05-4f124d8d40ac service nova] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Received event network-vif-deleted-26b1c08a-ffa0-488a-ae0b-482ca395c8ad {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 873.803496] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354167, 'name': Rename_Task} progress is 99%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.880662] env[61629]: DEBUG oslo_concurrency.lockutils [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.893729] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]526bb3d3-fa0d-905c-73ee-bf547982a71b, 'name': SearchDatastore_Task, 'duration_secs': 0.043497} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.894523] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d70c38ce-7dc5-4d31-ba75-648a17a330b5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.899714] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 873.899714] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5235fc71-8414-1d48-d8b9-6c677ee5d7e4" [ 873.899714] env[61629]: _type = "Task" [ 873.899714] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.907417] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5235fc71-8414-1d48-d8b9-6c677ee5d7e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.951660] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f8160814-3104-477b-9550-7064fa86d8d3 tempest-ServerMetadataNegativeTestJSON-1918710909 tempest-ServerMetadataNegativeTestJSON-1918710909-project-member] Lock "edb4e0f6-57ad-48cf-aa20-3b2549bff3fe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.311s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.171312] env[61629]: DEBUG nova.network.neutron [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Successfully updated port: 91e4e033-337e-4a36-a5a7-a54b29cc6531 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 874.264152] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354168, 'name': ReconfigVM_Task, 'duration_secs': 0.992284} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.264654] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Reconfigured VM instance instance-00000049 to attach disk [datastore1] da1eb7f9-7562-40c8-955b-c11f831b7bc8/da1eb7f9-7562-40c8-955b-c11f831b7bc8.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 874.267542] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9abf0c12-daf0-4712-b079-438f93d25a82 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.271836] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 874.271836] env[61629]: value = "task-1354169" [ 874.271836] env[61629]: _type = "Task" [ 874.271836] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.280341] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354169, 'name': Rename_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.304437] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354167, 'name': Rename_Task, 'duration_secs': 1.148155} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.304741] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 874.305066] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c5bb7581-78f5-43e2-a255-a9093640498d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.315019] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 874.315019] env[61629]: value = "task-1354170" [ 874.315019] env[61629]: _type = "Task" [ 874.315019] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.323999] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354170, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.410045] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5235fc71-8414-1d48-d8b9-6c677ee5d7e4, 'name': SearchDatastore_Task, 'duration_secs': 0.024488} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.410320] env[61629]: DEBUG oslo_concurrency.lockutils [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 874.410575] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 87a1383f-d66b-4bde-b153-89ac62ff8390/87a1383f-d66b-4bde-b153-89ac62ff8390.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 874.410838] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-57c5d0bb-c9e5-4fe3-83d0-3442c5765ff7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.418022] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 874.418022] env[61629]: value = "task-1354171" [ 874.418022] env[61629]: _type = "Task" [ 874.418022] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.427224] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354171, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.674272] env[61629]: DEBUG oslo_concurrency.lockutils [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "refresh_cache-0d21b352-bdd0-4887-8658-cd5c448352d2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 874.674464] env[61629]: DEBUG oslo_concurrency.lockutils [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquired lock "refresh_cache-0d21b352-bdd0-4887-8658-cd5c448352d2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.674528] env[61629]: DEBUG nova.network.neutron [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 874.748764] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e76fb28-3c41-49e3-bab8-17d215a60182 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.757475] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b4b7d97-11ed-455e-87a2-ff1079d3cf60 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.796563] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-377549c7-7d20-48d3-bb55-aa3222896b94 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.805514] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354169, 'name': Rename_Task, 'duration_secs': 0.20667} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.808187] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 874.808743] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-266c9d8b-1c7f-477f-aca3-5fb27adcbeda {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.811652] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b26b9129-e2b4-4589-8046-72d7c4047b43 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.821973] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 874.821973] env[61629]: value = "task-1354172" [ 874.821973] env[61629]: _type = "Task" [ 874.821973] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.831198] env[61629]: DEBUG nova.compute.provider_tree [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.838882] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354170, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.847625] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354172, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.935249] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354171, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.234895] env[61629]: DEBUG nova.network.neutron [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 875.326626] env[61629]: DEBUG oslo_vmware.api [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354170, 'name': PowerOnVM_Task, 'duration_secs': 0.520652} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.326626] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 875.326626] env[61629]: INFO nova.compute.manager [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Took 10.98 seconds to spawn the instance on the hypervisor. [ 875.326626] env[61629]: DEBUG nova.compute.manager [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 875.327146] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-874091df-0214-4fc0-8a62-8e8a85958123 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.341541] env[61629]: DEBUG nova.scheduler.client.report [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 875.356468] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354172, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.430307] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354171, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.598795} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.430652] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 87a1383f-d66b-4bde-b153-89ac62ff8390/87a1383f-d66b-4bde-b153-89ac62ff8390.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 875.430913] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 875.431215] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-04fb66d6-569e-4565-a98d-5fcb0e4f61dc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.438469] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 875.438469] env[61629]: value = "task-1354173" [ 875.438469] env[61629]: _type = "Task" [ 875.438469] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.446624] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354173, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.479254] env[61629]: DEBUG nova.network.neutron [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Updating instance_info_cache with network_info: [{"id": "91e4e033-337e-4a36-a5a7-a54b29cc6531", "address": "fa:16:3e:76:a0:44", "network": {"id": "a1fb78c4-7c5c-4692-86e0-3111b87b44c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1355821875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87909880104e4519b42cb204f366af3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91e4e033-33", "ovs_interfaceid": "91e4e033-337e-4a36-a5a7-a54b29cc6531", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.833645] env[61629]: DEBUG nova.compute.manager [req-7f15d553-ce2a-47c7-a765-1db77caf2ad0 req-41e4f2d8-d62e-412d-a0f0-fd46ce688264 service nova] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Received event network-vif-plugged-91e4e033-337e-4a36-a5a7-a54b29cc6531 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 875.833645] env[61629]: DEBUG oslo_concurrency.lockutils [req-7f15d553-ce2a-47c7-a765-1db77caf2ad0 req-41e4f2d8-d62e-412d-a0f0-fd46ce688264 service nova] Acquiring lock "0d21b352-bdd0-4887-8658-cd5c448352d2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.834366] env[61629]: DEBUG oslo_concurrency.lockutils [req-7f15d553-ce2a-47c7-a765-1db77caf2ad0 req-41e4f2d8-d62e-412d-a0f0-fd46ce688264 service nova] Lock "0d21b352-bdd0-4887-8658-cd5c448352d2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.834707] env[61629]: DEBUG oslo_concurrency.lockutils [req-7f15d553-ce2a-47c7-a765-1db77caf2ad0 req-41e4f2d8-d62e-412d-a0f0-fd46ce688264 service nova] Lock "0d21b352-bdd0-4887-8658-cd5c448352d2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.835039] env[61629]: DEBUG nova.compute.manager [req-7f15d553-ce2a-47c7-a765-1db77caf2ad0 req-41e4f2d8-d62e-412d-a0f0-fd46ce688264 service nova] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] No waiting events found dispatching 
network-vif-plugged-91e4e033-337e-4a36-a5a7-a54b29cc6531 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 875.835333] env[61629]: WARNING nova.compute.manager [req-7f15d553-ce2a-47c7-a765-1db77caf2ad0 req-41e4f2d8-d62e-412d-a0f0-fd46ce688264 service nova] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Received unexpected event network-vif-plugged-91e4e033-337e-4a36-a5a7-a54b29cc6531 for instance with vm_state building and task_state spawning. [ 875.835602] env[61629]: DEBUG nova.compute.manager [req-7f15d553-ce2a-47c7-a765-1db77caf2ad0 req-41e4f2d8-d62e-412d-a0f0-fd46ce688264 service nova] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Received event network-changed-91e4e033-337e-4a36-a5a7-a54b29cc6531 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 875.835909] env[61629]: DEBUG nova.compute.manager [req-7f15d553-ce2a-47c7-a765-1db77caf2ad0 req-41e4f2d8-d62e-412d-a0f0-fd46ce688264 service nova] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Refreshing instance network info cache due to event network-changed-91e4e033-337e-4a36-a5a7-a54b29cc6531. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 875.836224] env[61629]: DEBUG oslo_concurrency.lockutils [req-7f15d553-ce2a-47c7-a765-1db77caf2ad0 req-41e4f2d8-d62e-412d-a0f0-fd46ce688264 service nova] Acquiring lock "refresh_cache-0d21b352-bdd0-4887-8658-cd5c448352d2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.847044] env[61629]: DEBUG oslo_vmware.api [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354172, 'name': PowerOnVM_Task, 'duration_secs': 0.945551} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.849774] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 875.849774] env[61629]: INFO nova.compute.manager [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Took 9.05 seconds to spawn the instance on the hypervisor. 
[ 875.849774] env[61629]: DEBUG nova.compute.manager [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 875.849774] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6de894-3c8f-45e7-ae46-327759a73339 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.852413] env[61629]: DEBUG oslo_concurrency.lockutils [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.431s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.852686] env[61629]: DEBUG nova.compute.manager [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 875.865450] env[61629]: DEBUG oslo_concurrency.lockutils [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.980s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.867060] env[61629]: INFO nova.compute.claims [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 875.870854] env[61629]: INFO nova.compute.manager [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Took 28.17 seconds to build instance. [ 875.949125] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354173, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.270276} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.949420] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 875.950222] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beacb973-2fd1-4420-a701-3c1cb98c394b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.973126] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] 87a1383f-d66b-4bde-b153-89ac62ff8390/87a1383f-d66b-4bde-b153-89ac62ff8390.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 875.973823] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8aadf6e-bb39-4d87-a944-789d4c7ca5a5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.993159] env[61629]: DEBUG oslo_concurrency.lockutils [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Releasing lock "refresh_cache-0d21b352-bdd0-4887-8658-cd5c448352d2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.993544] env[61629]: DEBUG nova.compute.manager [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Instance network_info: |[{"id": "91e4e033-337e-4a36-a5a7-a54b29cc6531", "address": "fa:16:3e:76:a0:44", "network": {"id": "a1fb78c4-7c5c-4692-86e0-3111b87b44c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1355821875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87909880104e4519b42cb204f366af3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91e4e033-33", "ovs_interfaceid": "91e4e033-337e-4a36-a5a7-a54b29cc6531", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 875.994397] env[61629]: DEBUG 
oslo_concurrency.lockutils [req-7f15d553-ce2a-47c7-a765-1db77caf2ad0 req-41e4f2d8-d62e-412d-a0f0-fd46ce688264 service nova] Acquired lock "refresh_cache-0d21b352-bdd0-4887-8658-cd5c448352d2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.994582] env[61629]: DEBUG nova.network.neutron [req-7f15d553-ce2a-47c7-a765-1db77caf2ad0 req-41e4f2d8-d62e-412d-a0f0-fd46ce688264 service nova] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Refreshing network info cache for port 91e4e033-337e-4a36-a5a7-a54b29cc6531 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 875.998892] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:a0:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c492f5cc-7ae0-4cab-823c-0d5dd8c60b26', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91e4e033-337e-4a36-a5a7-a54b29cc6531', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 876.007492] env[61629]: DEBUG oslo.service.loopingcall [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 876.008400] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 876.008676] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-943bd41a-0b19-4a89-bfba-389e73994239 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.025135] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 876.025135] env[61629]: value = "task-1354174" [ 876.025135] env[61629]: _type = "Task" [ 876.025135] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.033949] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 876.033949] env[61629]: value = "task-1354175" [ 876.033949] env[61629]: _type = "Task" [ 876.033949] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.042594] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354174, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.047420] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354175, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.366991] env[61629]: DEBUG nova.compute.utils [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 876.368448] env[61629]: DEBUG nova.compute.manager [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 876.368613] env[61629]: DEBUG nova.network.neutron [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 876.378218] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d6e49df0-269a-4589-97f0-3a6191a8debd tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "d37958f8-7607-418b-9cfd-c3a5df721e94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.451s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.387931] env[61629]: INFO nova.compute.manager [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Took 28.01 seconds to build instance. [ 876.447995] env[61629]: DEBUG nova.policy [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c62f9a7c8b5f4ef985880339407b46a1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0578ce75c37942d4ba6c8b862ceb7d92', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 876.537621] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354174, 'name': ReconfigVM_Task, 'duration_secs': 0.494462} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.541780] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Reconfigured VM instance instance-0000004a to attach disk [datastore2] 87a1383f-d66b-4bde-b153-89ac62ff8390/87a1383f-d66b-4bde-b153-89ac62ff8390.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 876.542497] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f9d24b72-bbfc-498b-8f9e-3be869ccd2c4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.548628] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354175, 'name': CreateVM_Task} progress is 99%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.553574] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 876.553574] env[61629]: value = "task-1354176" [ 876.553574] env[61629]: _type = "Task" [ 876.553574] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.566158] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354176, 'name': Rename_Task} progress is 10%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.857967] env[61629]: DEBUG nova.network.neutron [req-7f15d553-ce2a-47c7-a765-1db77caf2ad0 req-41e4f2d8-d62e-412d-a0f0-fd46ce688264 service nova] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Updated VIF entry in instance network info cache for port 91e4e033-337e-4a36-a5a7-a54b29cc6531. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 876.862649] env[61629]: DEBUG nova.network.neutron [req-7f15d553-ce2a-47c7-a765-1db77caf2ad0 req-41e4f2d8-d62e-412d-a0f0-fd46ce688264 service nova] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Updating instance_info_cache with network_info: [{"id": "91e4e033-337e-4a36-a5a7-a54b29cc6531", "address": "fa:16:3e:76:a0:44", "network": {"id": "a1fb78c4-7c5c-4692-86e0-3111b87b44c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1355821875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87909880104e4519b42cb204f366af3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91e4e033-33", "ovs_interfaceid": "91e4e033-337e-4a36-a5a7-a54b29cc6531", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.873237] env[61629]: DEBUG nova.compute.manager [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 876.888057] env[61629]: DEBUG nova.compute.manager [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 876.895698] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a1a1c685-43d0-4068-a24a-1ed359972ba6 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "da1eb7f9-7562-40c8-955b-c11f831b7bc8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.656s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.920279] env[61629]: DEBUG nova.network.neutron [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Successfully created port: 91c99a2f-21ee-45af-9321-329b2cfb1799 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 877.050960] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354175, 'name': CreateVM_Task, 'duration_secs': 0.668099} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.053389] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 877.055074] env[61629]: DEBUG oslo_concurrency.lockutils [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.056102] env[61629]: DEBUG oslo_concurrency.lockutils [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.056102] env[61629]: DEBUG oslo_concurrency.lockutils [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 877.056102] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-960f9662-480d-4bc7-b711-f92a22c3d9d8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.065949] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 877.065949] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52605839-1b32-4b8e-b175-52fc5ec296ba" [ 877.065949] env[61629]: _type = "Task" [ 877.065949] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.072758] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354176, 'name': Rename_Task} progress is 99%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.083876] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52605839-1b32-4b8e-b175-52fc5ec296ba, 'name': SearchDatastore_Task, 'duration_secs': 0.014306} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.084214] env[61629]: DEBUG oslo_concurrency.lockutils [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.084562] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 877.084857] env[61629]: DEBUG oslo_concurrency.lockutils [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.085072] env[61629]: DEBUG oslo_concurrency.lockutils [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.085332] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 877.085699] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-757a77f2-a392-4c8e-9101-49ef9a435ee4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.098515] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 877.098515] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 877.099121] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fb50b40-30f8-409b-a36d-787a51ca511e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.107918] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 877.107918] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]526b561c-1fef-0ab1-05af-aaa53a79cf84" [ 877.107918] env[61629]: _type = "Task" [ 877.107918] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.116233] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]526b561c-1fef-0ab1-05af-aaa53a79cf84, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.245290] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27424c73-2d5b-4ba2-8201-11e85cef8566 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.252964] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9573fb74-5f91-48bf-b1bd-8b68e64c299f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.287415] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251e2ce4-8370-41e6-af69-520564eacd73 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.295292] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f882fa-4f26-49ce-a2b7-0b9446288448 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.311289] env[61629]: DEBUG nova.compute.provider_tree [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 877.369794] env[61629]: DEBUG oslo_concurrency.lockutils [req-7f15d553-ce2a-47c7-a765-1db77caf2ad0 req-41e4f2d8-d62e-412d-a0f0-fd46ce688264 service nova] Releasing lock "refresh_cache-0d21b352-bdd0-4887-8658-cd5c448352d2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.396341] env[61629]: DEBUG nova.compute.manager [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Starting instance... 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 877.434957] env[61629]: DEBUG oslo_concurrency.lockutils [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.566760] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354176, 'name': Rename_Task} progress is 99%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.618353] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]526b561c-1fef-0ab1-05af-aaa53a79cf84, 'name': SearchDatastore_Task, 'duration_secs': 0.029693} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.619189] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f851e901-7da3-4ed2-88cd-d1574e766690 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.624932] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 877.624932] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52d4a53e-eaa1-4548-f743-f955bbe14b79" [ 877.624932] env[61629]: _type = "Task" [ 877.624932] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.633610] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52d4a53e-eaa1-4548-f743-f955bbe14b79, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.816221] env[61629]: DEBUG nova.scheduler.client.report [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 877.886072] env[61629]: DEBUG nova.compute.manager [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 877.914953] env[61629]: DEBUG nova.virt.hardware [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 877.915214] env[61629]: DEBUG nova.virt.hardware [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 877.915369] env[61629]: DEBUG nova.virt.hardware [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 877.915548] env[61629]: DEBUG nova.virt.hardware [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 877.915692] env[61629]: DEBUG nova.virt.hardware [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 877.915834] env[61629]: DEBUG nova.virt.hardware [None 
req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 877.918912] env[61629]: DEBUG nova.virt.hardware [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 877.918912] env[61629]: DEBUG nova.virt.hardware [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 877.918912] env[61629]: DEBUG nova.virt.hardware [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 877.918912] env[61629]: DEBUG nova.virt.hardware [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 877.918912] env[61629]: DEBUG nova.virt.hardware [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 877.918912] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7856a08f-6141-4819-a6c6-6c009279cbe0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.928945] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca4a6eac-1a29-4e2d-bebe-c330bf4256c0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.935627] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.066618] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354176, 'name': Rename_Task, 'duration_secs': 1.381427} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.067131] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 878.067363] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-68b86c62-1502-4cdd-bae4-ddc0761fe02a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.073610] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 878.073610] env[61629]: value = "task-1354177" [ 878.073610] env[61629]: _type = "Task" [ 878.073610] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.081499] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354177, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.136081] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52d4a53e-eaa1-4548-f743-f955bbe14b79, 'name': SearchDatastore_Task, 'duration_secs': 0.012901} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.136552] env[61629]: DEBUG oslo_concurrency.lockutils [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.136861] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 0d21b352-bdd0-4887-8658-cd5c448352d2/0d21b352-bdd0-4887-8658-cd5c448352d2.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 878.137253] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-36312c1d-df62-4115-86d3-1189873c45b1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.146174] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 878.146174] env[61629]: value = "task-1354178" [ 878.146174] env[61629]: _type = "Task" [ 878.146174] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.155209] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354178, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.320023] env[61629]: DEBUG oslo_concurrency.lockutils [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.454s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.320560] env[61629]: DEBUG nova.compute.manager [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 878.324157] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.560s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.325692] env[61629]: INFO nova.compute.claims [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 878.586233] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354177, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.662095] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354178, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.740673] env[61629]: DEBUG nova.compute.manager [req-e3451263-c92e-4482-93d1-5fe8f636c007 req-c3476f85-2b10-4023-826a-638ce8a75602 service nova] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Received event network-vif-plugged-91c99a2f-21ee-45af-9321-329b2cfb1799 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 878.740982] env[61629]: DEBUG oslo_concurrency.lockutils [req-e3451263-c92e-4482-93d1-5fe8f636c007 req-c3476f85-2b10-4023-826a-638ce8a75602 service nova] Acquiring lock "55f2d2fc-9404-422f-ba08-72e6e11a089f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.741315] env[61629]: DEBUG oslo_concurrency.lockutils [req-e3451263-c92e-4482-93d1-5fe8f636c007 req-c3476f85-2b10-4023-826a-638ce8a75602 service nova] Lock "55f2d2fc-9404-422f-ba08-72e6e11a089f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.741861] env[61629]: DEBUG oslo_concurrency.lockutils [req-e3451263-c92e-4482-93d1-5fe8f636c007 req-c3476f85-2b10-4023-826a-638ce8a75602 service nova] Lock "55f2d2fc-9404-422f-ba08-72e6e11a089f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.742122] env[61629]: DEBUG nova.compute.manager [req-e3451263-c92e-4482-93d1-5fe8f636c007 req-c3476f85-2b10-4023-826a-638ce8a75602 service nova] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] No waiting events found dispatching network-vif-plugged-91c99a2f-21ee-45af-9321-329b2cfb1799 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 878.742370] env[61629]: WARNING nova.compute.manager 
[req-e3451263-c92e-4482-93d1-5fe8f636c007 req-c3476f85-2b10-4023-826a-638ce8a75602 service nova] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Received unexpected event network-vif-plugged-91c99a2f-21ee-45af-9321-329b2cfb1799 for instance with vm_state building and task_state spawning. [ 878.810849] env[61629]: DEBUG nova.network.neutron [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Successfully updated port: 91c99a2f-21ee-45af-9321-329b2cfb1799 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 878.826103] env[61629]: DEBUG nova.compute.utils [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 878.827616] env[61629]: DEBUG nova.compute.manager [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 878.827789] env[61629]: DEBUG nova.network.neutron [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 878.886308] env[61629]: DEBUG nova.policy [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '74afa0283b8a4ddfaf613a814c43d316', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d4a3c8a44624afb85154572bbf29483', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 879.092384] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354177, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.163235] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354178, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.634731} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.163644] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 0d21b352-bdd0-4887-8658-cd5c448352d2/0d21b352-bdd0-4887-8658-cd5c448352d2.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 879.163942] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 879.165063] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d3de6846-94f4-485d-8651-56c64e932436 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.175230] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 879.175230] env[61629]: value = "task-1354179" [ 879.175230] env[61629]: _type = "Task" [ 879.175230] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.184746] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354179, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.207489] env[61629]: DEBUG nova.network.neutron [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Successfully created port: 75a8ffd8-5a4a-4846-8213-980da8efc581 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 879.314301] env[61629]: DEBUG oslo_concurrency.lockutils [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "refresh_cache-55f2d2fc-9404-422f-ba08-72e6e11a089f" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.314301] env[61629]: DEBUG oslo_concurrency.lockutils [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired lock "refresh_cache-55f2d2fc-9404-422f-ba08-72e6e11a089f" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.314301] env[61629]: DEBUG nova.network.neutron [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 879.336391] env[61629]: DEBUG nova.compute.manager [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 879.589744] env[61629]: DEBUG oslo_vmware.api [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354177, 'name': PowerOnVM_Task, 'duration_secs': 1.116376} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.590151] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 879.590809] env[61629]: INFO nova.compute.manager [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Took 10.37 seconds to spawn the instance on the hypervisor. 
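[editor's note] The PowerOnVM_Task entries above (task started, "progress is N%", "completed successfully", then "Powered on the VM") follow oslo.vmware's invoke-then-poll pattern. A short sketch of that pattern, assuming an already-initialised oslo_vmware.api.VMwareAPISession named `session` and a managed-object reference `vm_ref` obtained elsewhere; this is illustrative, not the Nova driver code itself.

```python
def power_on(session, vm_ref):
    # Start the asynchronous vSphere task (logged above as
    # "Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-...").
    task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    # Block while oslo.vmware polls the task, which produces the periodic
    # "Task: {'id': task-..., 'name': PowerOnVM_Task} progress is N%" lines
    # until the task is reported "completed successfully".
    session.wait_for_task(task)
```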
[ 879.591264] env[61629]: DEBUG nova.compute.manager [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 879.592325] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d588140a-80b8-4ce6-a43d-927344ae06d4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.666444] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b4c09e2-c059-4805-bda5-5d44cd0d8a99 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.674471] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-691a99f0-ba0c-4f1d-9635-e3496884e7d1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.686271] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354179, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079786} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.709642] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 879.710091] env[61629]: DEBUG oslo_concurrency.lockutils [None req-933695a1-f970-44f7-a9b9-807fcbdb80da tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "9c340ca1-75e0-4d65-8aae-0d5e11ff3e66" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.710317] env[61629]: DEBUG oslo_concurrency.lockutils [None req-933695a1-f970-44f7-a9b9-807fcbdb80da tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "9c340ca1-75e0-4d65-8aae-0d5e11ff3e66" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.712199] env[61629]: DEBUG nova.compute.manager [None req-933695a1-f970-44f7-a9b9-807fcbdb80da tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 879.712514] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f628f63b-bc5b-441c-95f8-5ea538316bd8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.718071] env[61629]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80e6f2c-ef16-484b-ad3f-73c0ac97e457 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.721244] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e072c39d-122f-4d42-a081-2395fc472382 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.728707] env[61629]: DEBUG nova.compute.manager [None req-933695a1-f970-44f7-a9b9-807fcbdb80da tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61629) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 879.729514] env[61629]: DEBUG nova.objects.instance [None req-933695a1-f970-44f7-a9b9-807fcbdb80da tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lazy-loading 'flavor' on Instance uuid 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 879.754671] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 0d21b352-bdd0-4887-8658-cd5c448352d2/0d21b352-bdd0-4887-8658-cd5c448352d2.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 879.755393] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a291c19a-579a-4529-b209-29c2eb5ce34a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.770997] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c5b27b6-6c15-4b55-9ebe-067e93c323ca {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.787842] env[61629]: DEBUG nova.compute.provider_tree [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 879.791439] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 879.791439] env[61629]: value = "task-1354180" [ 879.791439] env[61629]: _type = "Task" [ 879.791439] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.801413] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354180, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.866895] env[61629]: DEBUG nova.network.neutron [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 880.117596] env[61629]: INFO nova.compute.manager [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Took 30.82 seconds to build instance. [ 880.123479] env[61629]: DEBUG nova.network.neutron [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Updating instance_info_cache with network_info: [{"id": "91c99a2f-21ee-45af-9321-329b2cfb1799", "address": "fa:16:3e:c7:b4:55", "network": {"id": "c1b68401-68d1-48c7-b118-722070249876", "bridge": "br-int", "label": "tempest-ServersTestJSON-1738845000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0578ce75c37942d4ba6c8b862ceb7d92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91c99a2f-21", "ovs_interfaceid": "91c99a2f-21ee-45af-9321-329b2cfb1799", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.259524] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-933695a1-f970-44f7-a9b9-807fcbdb80da tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 880.259827] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-825e6ad7-3652-498a-b06e-b3ef8a74266d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.267268] env[61629]: DEBUG oslo_vmware.api [None req-933695a1-f970-44f7-a9b9-807fcbdb80da tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 880.267268] env[61629]: value = "task-1354181" [ 880.267268] env[61629]: _type = "Task" [ 880.267268] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.277464] env[61629]: DEBUG oslo_vmware.api [None req-933695a1-f970-44f7-a9b9-807fcbdb80da tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354181, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.296017] env[61629]: DEBUG nova.scheduler.client.report [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 880.309134] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354180, 'name': ReconfigVM_Task, 'duration_secs': 0.309138} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.309478] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 0d21b352-bdd0-4887-8658-cd5c448352d2/0d21b352-bdd0-4887-8658-cd5c448352d2.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 880.310156] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0c6e8447-7208-44be-aad8-6445cc9cdbdc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.317842] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 880.317842] env[61629]: value = "task-1354182" [ 880.317842] env[61629]: _type = "Task" [ 880.317842] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.328262] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354182, 'name': Rename_Task} progress is 5%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.344045] env[61629]: DEBUG nova.compute.manager [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 880.375761] env[61629]: DEBUG nova.virt.hardware [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 880.376142] env[61629]: DEBUG nova.virt.hardware [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 880.376412] env[61629]: DEBUG nova.virt.hardware [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 880.376725] env[61629]: DEBUG nova.virt.hardware [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 880.376917] env[61629]: DEBUG nova.virt.hardware [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 880.377130] env[61629]: DEBUG nova.virt.hardware [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 880.377450] env[61629]: DEBUG nova.virt.hardware [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 880.377686] env[61629]: DEBUG nova.virt.hardware [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 880.377903] env[61629]: DEBUG nova.virt.hardware [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 880.378152] env[61629]: DEBUG nova.virt.hardware [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 880.378388] env[61629]: DEBUG nova.virt.hardware [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 880.379366] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754d6c8f-c296-4620-8ccc-39c336482798 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.389060] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d77869ad-fcf4-49e5-811a-63131d2aa604 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.621831] env[61629]: DEBUG oslo_concurrency.lockutils [None req-577042ba-4983-4b0c-8701-c97c83f2c1dc tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "87a1383f-d66b-4bde-b153-89ac62ff8390" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.394s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.626811] env[61629]: DEBUG oslo_concurrency.lockutils [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Releasing lock "refresh_cache-55f2d2fc-9404-422f-ba08-72e6e11a089f" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.626811] env[61629]: DEBUG nova.compute.manager [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Instance network_info: |[{"id": "91c99a2f-21ee-45af-9321-329b2cfb1799", "address": "fa:16:3e:c7:b4:55", "network": {"id": "c1b68401-68d1-48c7-b118-722070249876", "bridge": "br-int", "label": "tempest-ServersTestJSON-1738845000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, 
"meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0578ce75c37942d4ba6c8b862ceb7d92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91c99a2f-21", "ovs_interfaceid": "91c99a2f-21ee-45af-9321-329b2cfb1799", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 880.627312] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:b4:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba866c99-1cb2-4588-9f76-4bc0421ed46a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91c99a2f-21ee-45af-9321-329b2cfb1799', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 880.636448] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Creating folder: Project (0578ce75c37942d4ba6c8b862ceb7d92). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 880.636813] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37a7fb7a-9caf-44f5-bbba-12f22fa3c6a8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.656367] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Created folder: Project (0578ce75c37942d4ba6c8b862ceb7d92) in parent group-v288443. [ 880.656367] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Creating folder: Instances. Parent ref: group-v288505. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 880.656367] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-24a3ae89-83ae-4631-a99e-da589480d7a1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.672036] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Created folder: Instances in parent group-v288505. [ 880.672036] env[61629]: DEBUG oslo.service.loopingcall [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 880.672036] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 880.672036] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e73298c-5b6c-41e2-8ce0-d11f2e786391 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.695634] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 880.695634] env[61629]: value = "task-1354185" [ 880.695634] env[61629]: _type = "Task" [ 880.695634] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.705383] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354185, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.782020] env[61629]: DEBUG oslo_vmware.api [None req-933695a1-f970-44f7-a9b9-807fcbdb80da tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354181, 'name': PowerOffVM_Task, 'duration_secs': 0.25181} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.782020] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-933695a1-f970-44f7-a9b9-807fcbdb80da tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 880.782020] env[61629]: DEBUG nova.compute.manager [None req-933695a1-f970-44f7-a9b9-807fcbdb80da tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 880.782020] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-139170b2-9c14-475f-b1ac-c0e0526b1138 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.804147] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.480s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.804519] env[61629]: DEBUG nova.compute.manager [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 880.808113] env[61629]: DEBUG nova.compute.manager [req-dbbcca4f-ee38-4261-bc5f-90157def9c15 req-a6c3e5ca-3bb0-4ca9-bb10-a3271e78c1aa service nova] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Received event network-changed-91c99a2f-21ee-45af-9321-329b2cfb1799 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 880.808332] env[61629]: DEBUG nova.compute.manager [req-dbbcca4f-ee38-4261-bc5f-90157def9c15 req-a6c3e5ca-3bb0-4ca9-bb10-a3271e78c1aa service nova] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Refreshing instance network info cache due to event network-changed-91c99a2f-21ee-45af-9321-329b2cfb1799. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 880.808569] env[61629]: DEBUG oslo_concurrency.lockutils [req-dbbcca4f-ee38-4261-bc5f-90157def9c15 req-a6c3e5ca-3bb0-4ca9-bb10-a3271e78c1aa service nova] Acquiring lock "refresh_cache-55f2d2fc-9404-422f-ba08-72e6e11a089f" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.808713] env[61629]: DEBUG oslo_concurrency.lockutils [req-dbbcca4f-ee38-4261-bc5f-90157def9c15 req-a6c3e5ca-3bb0-4ca9-bb10-a3271e78c1aa service nova] Acquired lock "refresh_cache-55f2d2fc-9404-422f-ba08-72e6e11a089f" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.808872] env[61629]: DEBUG nova.network.neutron [req-dbbcca4f-ee38-4261-bc5f-90157def9c15 req-a6c3e5ca-3bb0-4ca9-bb10-a3271e78c1aa service nova] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Refreshing network info cache for port 91c99a2f-21ee-45af-9321-329b2cfb1799 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 880.813030] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.294s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.813030] env[61629]: DEBUG nova.objects.instance [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Lazy-loading 'resources' on Instance uuid c3f830d6-8999-49d5-a431-b09dfdaf8313 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 880.829919] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354182, 'name': Rename_Task, 'duration_secs': 0.194184} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.834028] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 880.834028] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f51b0766-c732-457f-9870-99fec556a727 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.838891] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 880.838891] env[61629]: value = "task-1354186" [ 880.838891] env[61629]: _type = "Task" [ 880.838891] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.849173] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354186, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.124852] env[61629]: DEBUG nova.compute.manager [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 881.207979] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354185, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.298848] env[61629]: DEBUG oslo_concurrency.lockutils [None req-933695a1-f970-44f7-a9b9-807fcbdb80da tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "9c340ca1-75e0-4d65-8aae-0d5e11ff3e66" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.588s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.311872] env[61629]: DEBUG nova.network.neutron [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Successfully updated port: 75a8ffd8-5a4a-4846-8213-980da8efc581 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 881.319048] env[61629]: DEBUG nova.compute.utils [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 881.326929] env[61629]: DEBUG nova.compute.manager [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 881.326929] env[61629]: DEBUG nova.network.neutron [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 881.352506] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354186, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.463107] env[61629]: DEBUG nova.policy [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3cbfdc70fad64e8ab37fb9e0c1a10e0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bc538b7901b4d65a6107db047063183', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 881.635628] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d240029-b748-41ef-9159-cd498ed32550 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.644842] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb10e33-c8ee-45e8-9544-3af2eeab4659 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.653930] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.688847] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a9245ba-8bae-408f-b7c3-fad40dd9632d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.701848] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a260d4-6c47-49f6-ae9e-3c7fd9399d1c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.715277] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354185, 'name': CreateVM_Task, 'duration_secs': 0.528092} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.725394] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 881.726401] env[61629]: DEBUG nova.compute.provider_tree [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 881.729077] env[61629]: DEBUG oslo_concurrency.lockutils [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.729319] env[61629]: DEBUG oslo_concurrency.lockutils [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.729667] env[61629]: DEBUG oslo_concurrency.lockutils [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 881.730367] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b29e0bd3-694c-4054-83be-0b99bfd17999 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.735988] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 881.735988] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52f4adc7-1d67-31ef-1ac3-68cde0c7d905" [ 881.735988] env[61629]: _type = "Task" [ 881.735988] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.745461] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52f4adc7-1d67-31ef-1ac3-68cde0c7d905, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.827161] env[61629]: DEBUG nova.compute.manager [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 881.830453] env[61629]: DEBUG oslo_concurrency.lockutils [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Acquiring lock "refresh_cache-12c6b03b-8295-43de-898f-a6c35f1693b7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.830702] env[61629]: DEBUG oslo_concurrency.lockutils [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Acquired lock "refresh_cache-12c6b03b-8295-43de-898f-a6c35f1693b7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.830881] env[61629]: DEBUG nova.network.neutron [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 881.852023] env[61629]: DEBUG oslo_vmware.api [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354186, 'name': PowerOnVM_Task, 'duration_secs': 0.802888} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.852141] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 881.855074] env[61629]: INFO nova.compute.manager [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Took 8.35 seconds to spawn the instance on the hypervisor. [ 881.855074] env[61629]: DEBUG nova.compute.manager [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 881.855074] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e7f304-2be6-481f-abf9-639c3cfcdf0f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.932613] env[61629]: DEBUG nova.network.neutron [req-dbbcca4f-ee38-4261-bc5f-90157def9c15 req-a6c3e5ca-3bb0-4ca9-bb10-a3271e78c1aa service nova] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Updated VIF entry in instance network info cache for port 91c99a2f-21ee-45af-9321-329b2cfb1799. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 881.932955] env[61629]: DEBUG nova.network.neutron [req-dbbcca4f-ee38-4261-bc5f-90157def9c15 req-a6c3e5ca-3bb0-4ca9-bb10-a3271e78c1aa service nova] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Updating instance_info_cache with network_info: [{"id": "91c99a2f-21ee-45af-9321-329b2cfb1799", "address": "fa:16:3e:c7:b4:55", "network": {"id": "c1b68401-68d1-48c7-b118-722070249876", "bridge": "br-int", "label": "tempest-ServersTestJSON-1738845000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0578ce75c37942d4ba6c8b862ceb7d92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91c99a2f-21", "ovs_interfaceid": "91c99a2f-21ee-45af-9321-329b2cfb1799", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.183721] env[61629]: DEBUG nova.network.neutron [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Successfully created port: e635a96a-7254-4754-9409-d9fc4a443cb5 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 882.210378] env[61629]: DEBUG nova.objects.instance [None req-4c393978-3056-45c8-b340-634193491d9e tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lazy-loading 'flavor' on Instance uuid 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 882.231198] env[61629]: DEBUG nova.scheduler.client.report [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 882.249246] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52f4adc7-1d67-31ef-1ac3-68cde0c7d905, 'name': SearchDatastore_Task, 'duration_secs': 0.012168} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.250156] env[61629]: DEBUG oslo_concurrency.lockutils [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.250691] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 882.251601] env[61629]: DEBUG oslo_concurrency.lockutils [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.251601] env[61629]: DEBUG oslo_concurrency.lockutils [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.251601] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 882.252282] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3fb601eb-c7fe-4df0-9d29-fc9d03ef56d7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.268635] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 882.268635] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 882.269623] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fddc36f7-a7e1-4020-a08d-63836c8176d0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.278528] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 882.278528] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]522ddea5-cc38-a053-3349-eb27dfb5691f" [ 882.278528] env[61629]: _type = "Task" [ 882.278528] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.282965] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]522ddea5-cc38-a053-3349-eb27dfb5691f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.371665] env[61629]: INFO nova.compute.manager [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Took 30.94 seconds to build instance. [ 882.417992] env[61629]: DEBUG nova.network.neutron [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 882.435949] env[61629]: DEBUG oslo_concurrency.lockutils [req-dbbcca4f-ee38-4261-bc5f-90157def9c15 req-a6c3e5ca-3bb0-4ca9-bb10-a3271e78c1aa service nova] Releasing lock "refresh_cache-55f2d2fc-9404-422f-ba08-72e6e11a089f" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.718585] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4c393978-3056-45c8-b340-634193491d9e tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "refresh_cache-9c340ca1-75e0-4d65-8aae-0d5e11ff3e66" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.718585] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4c393978-3056-45c8-b340-634193491d9e tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquired lock "refresh_cache-9c340ca1-75e0-4d65-8aae-0d5e11ff3e66" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.718585] env[61629]: DEBUG nova.network.neutron [None req-4c393978-3056-45c8-b340-634193491d9e tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 882.718585] env[61629]: DEBUG nova.objects.instance [None req-4c393978-3056-45c8-b340-634193491d9e tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lazy-loading 'info_cache' on Instance uuid 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 882.737350] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.927s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.740393] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 16.423s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.740393] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.740393] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61629) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 882.740797] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 
tempest-ServerPasswordTestJSON-782114612-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.153s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.742913] env[61629]: INFO nova.compute.claims [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 882.745881] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d539baa1-183d-46a1-b13d-ff0f5a35a7fb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.755919] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f159cd-5ca8-4aa3-8d96-421753289bb4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.774626] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7142ed43-3d85-4b92-8f6f-d5ca6c4e7328 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.779888] env[61629]: INFO nova.scheduler.client.report [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Deleted allocations for instance c3f830d6-8999-49d5-a431-b09dfdaf8313 [ 882.800542] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd44159-28f1-4c83-89e2-c619cd2a77b8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.804619] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]522ddea5-cc38-a053-3349-eb27dfb5691f, 'name': SearchDatastore_Task, 'duration_secs': 0.035522} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.807239] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b51d34fe-edcc-429b-a42c-793fa6e15ab6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.836262] env[61629]: INFO nova.compute.manager [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Rescuing [ 882.836518] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "refresh_cache-0d21b352-bdd0-4887-8658-cd5c448352d2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.836666] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquired lock "refresh_cache-0d21b352-bdd0-4887-8658-cd5c448352d2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.837386] env[61629]: DEBUG nova.network.neutron [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 882.838278] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181040MB free_disk=151GB free_vcpus=48 pci_devices=None {{(pid=61629) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 882.838446] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.846327] env[61629]: DEBUG nova.compute.manager [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 882.848524] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 882.848524] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]529f0e9e-3acf-4d37-e5bb-076a887419bf" [ 882.848524] env[61629]: _type = "Task" [ 882.848524] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.853512] env[61629]: DEBUG nova.network.neutron [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Updating instance_info_cache with network_info: [{"id": "75a8ffd8-5a4a-4846-8213-980da8efc581", "address": "fa:16:3e:4b:f0:f4", "network": {"id": "86661513-aef4-4dee-a3f2-8eaacd8fddf7", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-224958807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d4a3c8a44624afb85154572bbf29483", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75a8ffd8-5a", "ovs_interfaceid": "75a8ffd8-5a4a-4846-8213-980da8efc581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.862392] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]529f0e9e-3acf-4d37-e5bb-076a887419bf, 'name': SearchDatastore_Task, 'duration_secs': 0.015384} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.862994] env[61629]: DEBUG oslo_concurrency.lockutils [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.865268] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 55f2d2fc-9404-422f-ba08-72e6e11a089f/55f2d2fc-9404-422f-ba08-72e6e11a089f.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 882.865268] env[61629]: DEBUG nova.compute.manager [req-05f117d8-7955-4c34-adb0-07fb1a46f862 req-6bd7c14d-3af8-4986-b5e2-f18173c4d613 service nova] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Received event network-vif-plugged-75a8ffd8-5a4a-4846-8213-980da8efc581 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 882.865268] env[61629]: DEBUG oslo_concurrency.lockutils [req-05f117d8-7955-4c34-adb0-07fb1a46f862 req-6bd7c14d-3af8-4986-b5e2-f18173c4d613 service nova] Acquiring lock "12c6b03b-8295-43de-898f-a6c35f1693b7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.865268] env[61629]: DEBUG oslo_concurrency.lockutils [req-05f117d8-7955-4c34-adb0-07fb1a46f862 req-6bd7c14d-3af8-4986-b5e2-f18173c4d613 service nova] Lock "12c6b03b-8295-43de-898f-a6c35f1693b7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.865268] env[61629]: DEBUG oslo_concurrency.lockutils [req-05f117d8-7955-4c34-adb0-07fb1a46f862 req-6bd7c14d-3af8-4986-b5e2-f18173c4d613 service nova] Lock "12c6b03b-8295-43de-898f-a6c35f1693b7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.865268] env[61629]: DEBUG nova.compute.manager [req-05f117d8-7955-4c34-adb0-07fb1a46f862 req-6bd7c14d-3af8-4986-b5e2-f18173c4d613 service nova] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] No waiting events found dispatching network-vif-plugged-75a8ffd8-5a4a-4846-8213-980da8efc581 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 882.865621] env[61629]: WARNING nova.compute.manager [req-05f117d8-7955-4c34-adb0-07fb1a46f862 req-6bd7c14d-3af8-4986-b5e2-f18173c4d613 service nova] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Received unexpected event network-vif-plugged-75a8ffd8-5a4a-4846-8213-980da8efc581 for instance with vm_state building and task_state spawning. 
[ 882.865621] env[61629]: DEBUG nova.compute.manager [req-05f117d8-7955-4c34-adb0-07fb1a46f862 req-6bd7c14d-3af8-4986-b5e2-f18173c4d613 service nova] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Received event network-changed-75a8ffd8-5a4a-4846-8213-980da8efc581 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 882.865682] env[61629]: DEBUG nova.compute.manager [req-05f117d8-7955-4c34-adb0-07fb1a46f862 req-6bd7c14d-3af8-4986-b5e2-f18173c4d613 service nova] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Refreshing instance network info cache due to event network-changed-75a8ffd8-5a4a-4846-8213-980da8efc581. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 882.866226] env[61629]: DEBUG oslo_concurrency.lockutils [req-05f117d8-7955-4c34-adb0-07fb1a46f862 req-6bd7c14d-3af8-4986-b5e2-f18173c4d613 service nova] Acquiring lock "refresh_cache-12c6b03b-8295-43de-898f-a6c35f1693b7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.866226] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-40cb752e-1911-4c84-84b8-351a2e4b4924 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.876123] env[61629]: DEBUG oslo_concurrency.lockutils [None req-91344840-c466-490e-a813-474954dfad16 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "0d21b352-bdd0-4887-8658-cd5c448352d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.296s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.876315] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 882.876315] env[61629]: value = "task-1354187" [ 882.876315] env[61629]: _type = "Task" [ 882.876315] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.888534] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354187, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.897151] env[61629]: DEBUG nova.virt.hardware [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 882.897425] env[61629]: DEBUG nova.virt.hardware [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 882.897591] env[61629]: DEBUG nova.virt.hardware [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 882.897790] env[61629]: DEBUG nova.virt.hardware [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 882.897941] env[61629]: DEBUG nova.virt.hardware [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 882.898133] env[61629]: DEBUG nova.virt.hardware [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 882.898353] env[61629]: DEBUG nova.virt.hardware [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 882.898528] env[61629]: DEBUG nova.virt.hardware [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 882.898729] env[61629]: DEBUG nova.virt.hardware [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Got 1 
possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 882.898883] env[61629]: DEBUG nova.virt.hardware [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 882.899066] env[61629]: DEBUG nova.virt.hardware [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 882.899992] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52690ad4-b3d2-4bd9-aed2-c214ff526398 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.909398] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9bea084-98cc-45be-8c97-ae0a01b34005 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.220821] env[61629]: DEBUG nova.objects.base [None req-4c393978-3056-45c8-b340-634193491d9e tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Object Instance<9c340ca1-75e0-4d65-8aae-0d5e11ff3e66> lazy-loaded attributes: flavor,info_cache {{(pid=61629) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 883.298184] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7b3fb6f5-7b45-49fc-998e-441201f47c95 tempest-VolumesAdminNegativeTest-1601693871 tempest-VolumesAdminNegativeTest-1601693871-project-member] Lock "c3f830d6-8999-49d5-a431-b09dfdaf8313" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.124s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.356835] env[61629]: DEBUG oslo_concurrency.lockutils [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Releasing lock "refresh_cache-12c6b03b-8295-43de-898f-a6c35f1693b7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.357372] env[61629]: DEBUG nova.compute.manager [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Instance network_info: |[{"id": "75a8ffd8-5a4a-4846-8213-980da8efc581", "address": "fa:16:3e:4b:f0:f4", "network": {"id": "86661513-aef4-4dee-a3f2-8eaacd8fddf7", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-224958807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d4a3c8a44624afb85154572bbf29483", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": 
"ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75a8ffd8-5a", "ovs_interfaceid": "75a8ffd8-5a4a-4846-8213-980da8efc581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 883.358106] env[61629]: DEBUG oslo_concurrency.lockutils [req-05f117d8-7955-4c34-adb0-07fb1a46f862 req-6bd7c14d-3af8-4986-b5e2-f18173c4d613 service nova] Acquired lock "refresh_cache-12c6b03b-8295-43de-898f-a6c35f1693b7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.358408] env[61629]: DEBUG nova.network.neutron [req-05f117d8-7955-4c34-adb0-07fb1a46f862 req-6bd7c14d-3af8-4986-b5e2-f18173c4d613 service nova] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Refreshing network info cache for port 75a8ffd8-5a4a-4846-8213-980da8efc581 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 883.363020] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:f0:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '75a8ffd8-5a4a-4846-8213-980da8efc581', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 883.367659] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Creating folder: Project (3d4a3c8a44624afb85154572bbf29483). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 883.369216] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf6c9cc8-98f1-4d54-9fe5-b387b6087222 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.382587] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Created folder: Project (3d4a3c8a44624afb85154572bbf29483) in parent group-v288443. [ 883.382794] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Creating folder: Instances. Parent ref: group-v288508. 
{{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 883.383451] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bb915bb1-8295-4fb4-8e89-a59c4ab62c56 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.391774] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354187, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.401233] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Created folder: Instances in parent group-v288508. [ 883.401456] env[61629]: DEBUG oslo.service.loopingcall [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 883.401685] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 883.401915] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a0d8e4a1-8df0-4634-a308-58d8b9f33fd9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.423747] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 883.423747] env[61629]: value = "task-1354190" [ 883.423747] env[61629]: _type = "Task" [ 883.423747] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.435623] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354190, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.894025] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354187, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.737275} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.898796] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 55f2d2fc-9404-422f-ba08-72e6e11a089f/55f2d2fc-9404-422f-ba08-72e6e11a089f.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 883.899297] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 883.899911] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3b6b50b9-64c6-464c-90d1-cc59924458d1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.912998] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 883.912998] env[61629]: value = "task-1354191" [ 883.912998] env[61629]: _type = "Task" [ 883.912998] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.925018] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354191, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.934389] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354190, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.054552] env[61629]: DEBUG nova.network.neutron [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Updating instance_info_cache with network_info: [{"id": "91e4e033-337e-4a36-a5a7-a54b29cc6531", "address": "fa:16:3e:76:a0:44", "network": {"id": "a1fb78c4-7c5c-4692-86e0-3111b87b44c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1355821875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87909880104e4519b42cb204f366af3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91e4e033-33", "ovs_interfaceid": "91e4e033-337e-4a36-a5a7-a54b29cc6531", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.065114] env[61629]: DEBUG nova.network.neutron [None req-4c393978-3056-45c8-b340-634193491d9e tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Updating instance_info_cache with network_info: [{"id": "85b39faa-8b58-4b86-b4df-a4b98f2a5325", "address": "fa:16:3e:f8:40:65", "network": {"id": "4e6a4470-c260-4226-9409-37a70cc1e8c1", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-817073300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6165d04bf0a468faaab339addeaa59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85b39faa-8b", "ovs_interfaceid": "85b39faa-8b58-4b86-b4df-a4b98f2a5325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.070216] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c195624-5fd0-4c48-9def-bae2ab014527 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
884.077747] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-215dc7a8-27b6-4855-a1e5-f556741ccb1e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.118280] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-156fc4e5-de52-4336-b3f9-0110beda06d2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.126687] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9df40d9-8ca9-4e04-a566-d9becde7f29b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.142511] env[61629]: DEBUG nova.compute.provider_tree [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.424183] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354191, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072172} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.431182] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 884.431182] env[61629]: DEBUG nova.network.neutron [req-05f117d8-7955-4c34-adb0-07fb1a46f862 req-6bd7c14d-3af8-4986-b5e2-f18173c4d613 service nova] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Updated VIF entry in instance network info cache for port 75a8ffd8-5a4a-4846-8213-980da8efc581. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 884.431182] env[61629]: DEBUG nova.network.neutron [req-05f117d8-7955-4c34-adb0-07fb1a46f862 req-6bd7c14d-3af8-4986-b5e2-f18173c4d613 service nova] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Updating instance_info_cache with network_info: [{"id": "75a8ffd8-5a4a-4846-8213-980da8efc581", "address": "fa:16:3e:4b:f0:f4", "network": {"id": "86661513-aef4-4dee-a3f2-8eaacd8fddf7", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-224958807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d4a3c8a44624afb85154572bbf29483", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75a8ffd8-5a", "ovs_interfaceid": "75a8ffd8-5a4a-4846-8213-980da8efc581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.432116] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-721ee800-8edd-4102-b0be-08dbee747aff {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.465021] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] 55f2d2fc-9404-422f-ba08-72e6e11a089f/55f2d2fc-9404-422f-ba08-72e6e11a089f.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 884.470628] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24e1e7ed-ee04-4260-bf3f-3e4e9c3989e4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.484277] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354190, 'name': CreateVM_Task, 'duration_secs': 0.624953} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.484814] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 884.485966] env[61629]: DEBUG oslo_concurrency.lockutils [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.486163] env[61629]: DEBUG oslo_concurrency.lockutils [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.486475] env[61629]: DEBUG oslo_concurrency.lockutils [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 884.486727] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df274e61-a466-411f-9215-8993e982432b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.490829] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 884.490829] env[61629]: value = "task-1354192" [ 884.490829] env[61629]: _type = "Task" [ 884.490829] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.492116] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Waiting for the task: (returnval){ [ 884.492116] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52f754d1-7967-6508-ab4b-fc54e66094b8" [ 884.492116] env[61629]: _type = "Task" [ 884.492116] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.505056] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354192, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.508671] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52f754d1-7967-6508-ab4b-fc54e66094b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.557837] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Releasing lock "refresh_cache-0d21b352-bdd0-4887-8658-cd5c448352d2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.560924] env[61629]: DEBUG nova.network.neutron [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Successfully updated port: e635a96a-7254-4754-9409-d9fc4a443cb5 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 884.574953] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4c393978-3056-45c8-b340-634193491d9e tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Releasing lock "refresh_cache-9c340ca1-75e0-4d65-8aae-0d5e11ff3e66" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.645843] env[61629]: DEBUG nova.scheduler.client.report [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 884.911080] env[61629]: DEBUG nova.compute.manager [req-c9e878e2-d766-4432-9441-9f65a127ec5f req-d74393a4-0661-42f7-8581-44f4048f49ef service nova] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Received event network-vif-plugged-e635a96a-7254-4754-9409-d9fc4a443cb5 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 884.911324] env[61629]: DEBUG oslo_concurrency.lockutils [req-c9e878e2-d766-4432-9441-9f65a127ec5f req-d74393a4-0661-42f7-8581-44f4048f49ef service nova] Acquiring lock "1d451558-dbbc-4942-b739-5d4b88057a75-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.911536] env[61629]: DEBUG oslo_concurrency.lockutils [req-c9e878e2-d766-4432-9441-9f65a127ec5f req-d74393a4-0661-42f7-8581-44f4048f49ef service nova] Lock "1d451558-dbbc-4942-b739-5d4b88057a75-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.911734] env[61629]: DEBUG oslo_concurrency.lockutils [req-c9e878e2-d766-4432-9441-9f65a127ec5f req-d74393a4-0661-42f7-8581-44f4048f49ef service nova] Lock "1d451558-dbbc-4942-b739-5d4b88057a75-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.911868] env[61629]: DEBUG nova.compute.manager [req-c9e878e2-d766-4432-9441-9f65a127ec5f req-d74393a4-0661-42f7-8581-44f4048f49ef service nova] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] No waiting events found dispatching network-vif-plugged-e635a96a-7254-4754-9409-d9fc4a443cb5 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 884.912150] env[61629]: WARNING nova.compute.manager [req-c9e878e2-d766-4432-9441-9f65a127ec5f req-d74393a4-0661-42f7-8581-44f4048f49ef service nova] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Received unexpected event network-vif-plugged-e635a96a-7254-4754-9409-d9fc4a443cb5 for instance with vm_state building and task_state spawning. [ 884.912375] env[61629]: DEBUG nova.compute.manager [req-c9e878e2-d766-4432-9441-9f65a127ec5f req-d74393a4-0661-42f7-8581-44f4048f49ef service nova] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Received event network-changed-e635a96a-7254-4754-9409-d9fc4a443cb5 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 884.912539] env[61629]: DEBUG nova.compute.manager [req-c9e878e2-d766-4432-9441-9f65a127ec5f req-d74393a4-0661-42f7-8581-44f4048f49ef service nova] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Refreshing instance network info cache due to event network-changed-e635a96a-7254-4754-9409-d9fc4a443cb5. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 884.912728] env[61629]: DEBUG oslo_concurrency.lockutils [req-c9e878e2-d766-4432-9441-9f65a127ec5f req-d74393a4-0661-42f7-8581-44f4048f49ef service nova] Acquiring lock "refresh_cache-1d451558-dbbc-4942-b739-5d4b88057a75" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.912863] env[61629]: DEBUG oslo_concurrency.lockutils [req-c9e878e2-d766-4432-9441-9f65a127ec5f req-d74393a4-0661-42f7-8581-44f4048f49ef service nova] Acquired lock "refresh_cache-1d451558-dbbc-4942-b739-5d4b88057a75" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.913031] env[61629]: DEBUG nova.network.neutron [req-c9e878e2-d766-4432-9441-9f65a127ec5f req-d74393a4-0661-42f7-8581-44f4048f49ef service nova] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Refreshing network info cache for port e635a96a-7254-4754-9409-d9fc4a443cb5 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 884.936308] env[61629]: DEBUG oslo_concurrency.lockutils [req-05f117d8-7955-4c34-adb0-07fb1a46f862 req-6bd7c14d-3af8-4986-b5e2-f18173c4d613 service nova] Releasing lock "refresh_cache-12c6b03b-8295-43de-898f-a6c35f1693b7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.005660] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52f754d1-7967-6508-ab4b-fc54e66094b8, 'name': SearchDatastore_Task, 'duration_secs': 0.021349} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.009760] env[61629]: DEBUG oslo_concurrency.lockutils [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.009760] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 885.009760] env[61629]: DEBUG oslo_concurrency.lockutils [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.009760] env[61629]: DEBUG oslo_concurrency.lockutils [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.010039] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 885.010114] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354192, 'name': ReconfigVM_Task, 'duration_secs': 0.302851} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.010315] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-489cf062-b969-4f50-aafc-eb35b44df2c0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.012137] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Reconfigured VM instance instance-0000004c to attach disk [datastore2] 55f2d2fc-9404-422f-ba08-72e6e11a089f/55f2d2fc-9404-422f-ba08-72e6e11a089f.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 885.012764] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e931655-2ca9-40ff-aca8-07aaa0932a05 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.024022] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 885.024022] env[61629]: value = "task-1354193" [ 885.024022] env[61629]: _type = "Task" [ 885.024022] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.036039] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354193, 'name': Rename_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.037689] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 885.037841] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 885.041542] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa2a7c4a-50da-4919-b60a-e2aa0f5e73c8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.048380] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Waiting for the task: (returnval){ [ 885.048380] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52c585fd-1f1a-2b29-8ffc-1326ac00da59" [ 885.048380] env[61629]: _type = "Task" [ 885.048380] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.060017] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52c585fd-1f1a-2b29-8ffc-1326ac00da59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.066578] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "refresh_cache-1d451558-dbbc-4942-b739-5d4b88057a75" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.080792] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c393978-3056-45c8-b340-634193491d9e tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 885.082209] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f86712af-1be4-4693-be29-644a545fcab7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.088618] env[61629]: DEBUG oslo_vmware.api [None req-4c393978-3056-45c8-b340-634193491d9e tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 885.088618] env[61629]: value = "task-1354194" [ 885.088618] env[61629]: _type = "Task" [ 885.088618] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.101186] env[61629]: DEBUG oslo_vmware.api [None req-4c393978-3056-45c8-b340-634193491d9e tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354194, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.101731] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 885.101961] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-712023e4-c826-4e39-8f93-3a18ed2c3225 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.108510] env[61629]: DEBUG oslo_vmware.api [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 885.108510] env[61629]: value = "task-1354195" [ 885.108510] env[61629]: _type = "Task" [ 885.108510] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.119133] env[61629]: DEBUG oslo_vmware.api [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354195, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.152487] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.412s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.153111] env[61629]: DEBUG nova.compute.manager [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 885.158848] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.874s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.162094] env[61629]: INFO nova.compute.claims [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 885.463156] env[61629]: DEBUG nova.network.neutron [req-c9e878e2-d766-4432-9441-9f65a127ec5f req-d74393a4-0661-42f7-8581-44f4048f49ef service nova] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 885.539458] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354193, 'name': Rename_Task, 'duration_secs': 0.25398} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.540595] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 885.540904] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45012cca-aae5-4dfd-b310-3e036cf2c8d9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.547344] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 885.547344] env[61629]: value = "task-1354196" [ 885.547344] env[61629]: _type = "Task" [ 885.547344] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.561321] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354196, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.570059] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52c585fd-1f1a-2b29-8ffc-1326ac00da59, 'name': SearchDatastore_Task, 'duration_secs': 0.034184} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.571268] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-845dbbbc-0ddc-44b1-a698-3006dd0c1655 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.576917] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Waiting for the task: (returnval){ [ 885.576917] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52e15c72-e17f-b360-4527-a17da8cec611" [ 885.576917] env[61629]: _type = "Task" [ 885.576917] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.587098] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52e15c72-e17f-b360-4527-a17da8cec611, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.602195] env[61629]: DEBUG oslo_vmware.api [None req-4c393978-3056-45c8-b340-634193491d9e tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354194, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.618227] env[61629]: DEBUG oslo_vmware.api [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354195, 'name': PowerOffVM_Task, 'duration_secs': 0.267315} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.618227] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 885.618664] env[61629]: DEBUG nova.network.neutron [req-c9e878e2-d766-4432-9441-9f65a127ec5f req-d74393a4-0661-42f7-8581-44f4048f49ef service nova] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.620388] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59a17c1-bbd8-43ac-a61b-a87e1555e73b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.641059] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-186ddc80-1745-44cd-b500-b8d987763d6e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.667504] env[61629]: DEBUG nova.compute.utils [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 885.669272] env[61629]: DEBUG nova.compute.manager [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 885.669439] env[61629]: DEBUG nova.network.neutron [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 885.686237] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 885.686437] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b7516dd-90d7-4e95-88fe-cf3ff30bebec {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.694627] env[61629]: DEBUG oslo_vmware.api [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 885.694627] env[61629]: value = "task-1354197" [ 885.694627] env[61629]: _type = "Task" [ 885.694627] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.705095] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] VM already powered off {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 885.705361] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 885.706230] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.776717] env[61629]: DEBUG nova.policy [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6426746ca5c34435947d9e49eb98b888', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '092c2c52e82042dca2ce57b5f3d7ad2e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 885.848467] 
env[61629]: DEBUG oslo_concurrency.lockutils [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Acquiring lock "2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.848769] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Lock "2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.062024] env[61629]: DEBUG oslo_vmware.api [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354196, 'name': PowerOnVM_Task, 'duration_secs': 0.492214} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.062024] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 886.062024] env[61629]: INFO nova.compute.manager [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Took 8.18 seconds to spawn the instance on the hypervisor. [ 886.062024] env[61629]: DEBUG nova.compute.manager [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 886.063246] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e83695a-5515-4f95-b770-76e520578d5e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.089022] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52e15c72-e17f-b360-4527-a17da8cec611, 'name': SearchDatastore_Task, 'duration_secs': 0.010437} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.089022] env[61629]: DEBUG oslo_concurrency.lockutils [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.089022] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 12c6b03b-8295-43de-898f-a6c35f1693b7/12c6b03b-8295-43de-898f-a6c35f1693b7.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 886.089243] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.089321] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 886.089546] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f8a16b9c-d8e4-4e9c-b34b-0613a96876aa {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.091976] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2bfd7816-300f-478c-b8a5-15d7583d833c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.103800] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Waiting for the task: (returnval){ [ 886.103800] env[61629]: value = "task-1354198" [ 886.103800] env[61629]: _type = "Task" [ 886.103800] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.108377] env[61629]: DEBUG oslo_vmware.api [None req-4c393978-3056-45c8-b340-634193491d9e tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354194, 'name': PowerOnVM_Task, 'duration_secs': 0.812988} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.112839] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c393978-3056-45c8-b340-634193491d9e tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 886.114061] env[61629]: DEBUG nova.compute.manager [None req-4c393978-3056-45c8-b340-634193491d9e tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 886.114061] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 886.114061] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 886.115268] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b256258b-d150-4567-8e72-66f41de36ce6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.118950] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c617958f-4fff-4579-8441-ac68445860d9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.127921] env[61629]: DEBUG oslo_concurrency.lockutils [req-c9e878e2-d766-4432-9441-9f65a127ec5f req-d74393a4-0661-42f7-8581-44f4048f49ef service nova] Releasing lock "refresh_cache-1d451558-dbbc-4942-b739-5d4b88057a75" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.131457] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Task: {'id': task-1354198, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.135566] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquired lock "refresh_cache-1d451558-dbbc-4942-b739-5d4b88057a75" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.135566] env[61629]: DEBUG nova.network.neutron [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 886.139335] env[61629]: DEBUG oslo_vmware.api [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 886.139335] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52f54a15-9240-fc09-9fcb-6f6432b7b9de" [ 886.139335] env[61629]: _type = "Task" [ 886.139335] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.151398] env[61629]: DEBUG oslo_vmware.api [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52f54a15-9240-fc09-9fcb-6f6432b7b9de, 'name': SearchDatastore_Task, 'duration_secs': 0.016396} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.152807] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5e4213b-9fe8-4010-82cb-f6a60d1dae00 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.159311] env[61629]: DEBUG oslo_vmware.api [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 886.159311] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]527c3916-4833-3da7-14bb-2df40682e3b7" [ 886.159311] env[61629]: _type = "Task" [ 886.159311] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.168215] env[61629]: DEBUG oslo_vmware.api [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]527c3916-4833-3da7-14bb-2df40682e3b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.175841] env[61629]: DEBUG nova.compute.manager [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 886.287013] env[61629]: DEBUG nova.network.neutron [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Successfully created port: 155236cd-5bf3-4503-8968-010a3af74156 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 886.353508] env[61629]: DEBUG nova.compute.manager [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 886.512698] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b82b864-f86c-4c78-8f61-ae916089c4ff {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.520432] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8450639d-c091-4043-ab93-86cb190b7bd8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.550327] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22617da-7ae7-4e49-9717-030ddc5b2fa6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.558714] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95bad705-8a91-4d6e-9210-41b36fe9a530 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.575306] env[61629]: DEBUG nova.compute.provider_tree [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 886.587536] env[61629]: INFO nova.compute.manager [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Took 27.23 seconds to build instance. [ 886.619517] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Task: {'id': task-1354198, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.674271] env[61629]: DEBUG oslo_vmware.api [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]527c3916-4833-3da7-14bb-2df40682e3b7, 'name': SearchDatastore_Task, 'duration_secs': 0.01477} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.674579] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.674849] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 0d21b352-bdd0-4887-8658-cd5c448352d2/7f036972-f3d8-47df-ae86-f8f2844bf80c-rescue.vmdk. {{(pid=61629) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 886.675144] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f13f04c-e8f5-4873-ac60-8312343a1a6a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.687510] env[61629]: DEBUG oslo_vmware.api [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 886.687510] env[61629]: value = "task-1354199" [ 886.687510] env[61629]: _type = "Task" [ 886.687510] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.699471] env[61629]: DEBUG oslo_vmware.api [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354199, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.712774] env[61629]: DEBUG nova.network.neutron [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 886.902342] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.941276] env[61629]: DEBUG nova.network.neutron [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Updating instance_info_cache with network_info: [{"id": "e635a96a-7254-4754-9409-d9fc4a443cb5", "address": "fa:16:3e:19:e3:b5", "network": {"id": "a7538a0c-7a9f-412d-a8ac-6bf1c1969079", "bridge": "br-int", "label": "tempest-ImagesTestJSON-163856709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bc538b7901b4d65a6107db047063183", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape635a96a-72", "ovs_interfaceid": "e635a96a-7254-4754-9409-d9fc4a443cb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.078230] env[61629]: DEBUG nova.scheduler.client.report [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 887.090998] env[61629]: DEBUG oslo_concurrency.lockutils [None req-39f575e4-e73b-4b6f-b23c-8962db62f874 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "55f2d2fc-9404-422f-ba08-72e6e11a089f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.112s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.122016] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Task: {'id': task-1354198, 'name': 
CopyVirtualDisk_Task, 'duration_secs': 0.824139} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.122287] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 12c6b03b-8295-43de-898f-a6c35f1693b7/12c6b03b-8295-43de-898f-a6c35f1693b7.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 887.122509] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 887.122778] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-424ae31f-cd7b-4dec-91e9-8dcbde82ed2e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.131736] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Waiting for the task: (returnval){ [ 887.131736] env[61629]: value = "task-1354200" [ 887.131736] env[61629]: _type = "Task" [ 887.131736] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.142381] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Task: {'id': task-1354200, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.184446] env[61629]: DEBUG nova.compute.manager [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 887.200953] env[61629]: DEBUG oslo_vmware.api [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354199, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.220438] env[61629]: DEBUG nova.virt.hardware [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 887.220703] env[61629]: DEBUG nova.virt.hardware [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 887.220889] env[61629]: DEBUG nova.virt.hardware [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 887.221225] env[61629]: DEBUG nova.virt.hardware [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 887.221483] env[61629]: DEBUG nova.virt.hardware [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 887.221700] env[61629]: DEBUG nova.virt.hardware [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 887.222059] env[61629]: DEBUG nova.virt.hardware [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 887.222327] env[61629]: DEBUG nova.virt.hardware [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 887.222601] env[61629]: DEBUG nova.virt.hardware [None 
req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 887.222867] env[61629]: DEBUG nova.virt.hardware [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 887.223115] env[61629]: DEBUG nova.virt.hardware [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 887.224095] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50782b61-1966-43ff-b138-21e9689d2623 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.232703] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff4cd091-0927-43aa-9ae4-3fe173d49ecf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.445656] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Releasing lock "refresh_cache-1d451558-dbbc-4942-b739-5d4b88057a75" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.446057] env[61629]: DEBUG nova.compute.manager [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Instance network_info: |[{"id": "e635a96a-7254-4754-9409-d9fc4a443cb5", "address": "fa:16:3e:19:e3:b5", "network": {"id": "a7538a0c-7a9f-412d-a8ac-6bf1c1969079", "bridge": "br-int", "label": "tempest-ImagesTestJSON-163856709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bc538b7901b4d65a6107db047063183", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape635a96a-72", "ovs_interfaceid": "e635a96a-7254-4754-9409-d9fc4a443cb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 887.447435] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 
tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:e3:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15922696-dc08-44ef-97be-0b09a9dfeae8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e635a96a-7254-4754-9409-d9fc4a443cb5', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 887.454670] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Creating folder: Project (4bc538b7901b4d65a6107db047063183). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 887.455842] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-804b3ee2-89ee-48aa-8c5e-d736fa6cd01b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.465682] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Created folder: Project (4bc538b7901b4d65a6107db047063183) in parent group-v288443. [ 887.465879] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Creating folder: Instances. Parent ref: group-v288511. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 887.466151] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d141999e-1891-4083-aaed-d6d69eb558ca {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.475606] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Created folder: Instances in parent group-v288511. [ 887.475836] env[61629]: DEBUG oslo.service.loopingcall [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 887.476043] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 887.479566] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be31412d-081b-4f22-b77e-a9d7edb64993 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.497457] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 887.497457] env[61629]: value = "task-1354203" [ 887.497457] env[61629]: _type = "Task" [ 887.497457] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.506306] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354203, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.587911] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.429s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.588508] env[61629]: DEBUG nova.compute.manager [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 887.591369] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.686s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 887.591689] env[61629]: DEBUG nova.objects.instance [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Lazy-loading 'resources' on Instance uuid 08cb71f4-2ebe-4694-856c-2e772f319cdf {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 887.642850] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Task: {'id': task-1354200, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.296383} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.643206] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 887.643919] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43744737-95c4-40a5-b3ba-52ebf2dfe35b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.675158] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] 12c6b03b-8295-43de-898f-a6c35f1693b7/12c6b03b-8295-43de-898f-a6c35f1693b7.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 887.675158] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e0d3a3d-2a32-4163-a0d0-99b03d6aa7ee {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.699198] env[61629]: DEBUG oslo_vmware.api [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354199, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.723475} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.701282] env[61629]: INFO nova.virt.vmwareapi.ds_util [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 0d21b352-bdd0-4887-8658-cd5c448352d2/7f036972-f3d8-47df-ae86-f8f2844bf80c-rescue.vmdk. [ 887.701846] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Waiting for the task: (returnval){ [ 887.701846] env[61629]: value = "task-1354204" [ 887.701846] env[61629]: _type = "Task" [ 887.701846] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.702742] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b6a286-726f-45fc-bfbf-6cb269108cb3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.732882] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 0d21b352-bdd0-4887-8658-cd5c448352d2/7f036972-f3d8-47df-ae86-f8f2844bf80c-rescue.vmdk or device None with type thin {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 887.737372] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3797b3e-6997-43f5-a6dc-82c9a462c51b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.753129] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Task: {'id': task-1354204, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.756722] env[61629]: DEBUG oslo_vmware.api [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 887.756722] env[61629]: value = "task-1354205" [ 887.756722] env[61629]: _type = "Task" [ 887.756722] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.770544] env[61629]: DEBUG oslo_vmware.api [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354205, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.935109] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "55f2d2fc-9404-422f-ba08-72e6e11a089f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 887.935538] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "55f2d2fc-9404-422f-ba08-72e6e11a089f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 887.937021] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "55f2d2fc-9404-422f-ba08-72e6e11a089f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 887.937021] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "55f2d2fc-9404-422f-ba08-72e6e11a089f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 887.937021] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "55f2d2fc-9404-422f-ba08-72e6e11a089f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.938406] env[61629]: INFO nova.compute.manager [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Terminating instance [ 887.940871] env[61629]: DEBUG nova.compute.manager [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 887.941071] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 887.941944] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea70329-afe2-4b81-b4d4-6af104d7cffe {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.951367] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 887.951721] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dbbe8bed-c9a5-4e09-8a16-9818be9df787 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.959228] env[61629]: DEBUG oslo_vmware.api [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 887.959228] env[61629]: value = "task-1354206" [ 887.959228] env[61629]: _type = "Task" [ 887.959228] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.967902] env[61629]: DEBUG oslo_vmware.api [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354206, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.007265] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354203, 'name': CreateVM_Task, 'duration_secs': 0.414499} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.007545] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 888.009735] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.009896] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.011052] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 888.011052] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79492951-4eed-4afb-8e88-cbf6d232c451 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.018022] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Waiting for the task: (returnval){ [ 888.018022] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52301ae3-37b5-ffb2-a66d-816a7b7d5231" [ 888.018022] env[61629]: _type = "Task" [ 888.018022] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.023785] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52301ae3-37b5-ffb2-a66d-816a7b7d5231, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.087962] env[61629]: DEBUG nova.compute.manager [req-f4889ede-a307-4b3c-a2f7-5cf42ea323de req-b4834f68-8224-4b05-be3e-1d68f5148c67 service nova] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Received event network-vif-plugged-155236cd-5bf3-4503-8968-010a3af74156 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 888.088280] env[61629]: DEBUG oslo_concurrency.lockutils [req-f4889ede-a307-4b3c-a2f7-5cf42ea323de req-b4834f68-8224-4b05-be3e-1d68f5148c67 service nova] Acquiring lock "109ab664-3bb9-420e-a4a5-526277c60b96-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.088550] env[61629]: DEBUG oslo_concurrency.lockutils [req-f4889ede-a307-4b3c-a2f7-5cf42ea323de req-b4834f68-8224-4b05-be3e-1d68f5148c67 service nova] Lock "109ab664-3bb9-420e-a4a5-526277c60b96-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.088715] env[61629]: DEBUG oslo_concurrency.lockutils [req-f4889ede-a307-4b3c-a2f7-5cf42ea323de req-b4834f68-8224-4b05-be3e-1d68f5148c67 service nova] Lock "109ab664-3bb9-420e-a4a5-526277c60b96-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.088887] env[61629]: DEBUG nova.compute.manager [req-f4889ede-a307-4b3c-a2f7-5cf42ea323de req-b4834f68-8224-4b05-be3e-1d68f5148c67 service nova] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] No waiting events found dispatching network-vif-plugged-155236cd-5bf3-4503-8968-010a3af74156 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 888.089099] env[61629]: WARNING nova.compute.manager [req-f4889ede-a307-4b3c-a2f7-5cf42ea323de req-b4834f68-8224-4b05-be3e-1d68f5148c67 service nova] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Received unexpected event network-vif-plugged-155236cd-5bf3-4503-8968-010a3af74156 for instance with vm_state building and task_state spawning. [ 888.096454] env[61629]: DEBUG nova.compute.utils [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 888.105801] env[61629]: DEBUG nova.compute.manager [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 888.106058] env[61629]: DEBUG nova.network.neutron [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 888.188939] env[61629]: DEBUG nova.policy [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec39705b9dd24915a0b3723ea45a85d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38efdd2cc07f45a49fb06d590aafb96b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 888.193592] env[61629]: DEBUG nova.network.neutron [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Successfully updated port: 155236cd-5bf3-4503-8968-010a3af74156 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 888.221272] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Task: {'id': task-1354204, 'name': ReconfigVM_Task, 'duration_secs': 0.430844} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.221603] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Reconfigured VM instance instance-0000004d to attach disk [datastore2] 12c6b03b-8295-43de-898f-a6c35f1693b7/12c6b03b-8295-43de-898f-a6c35f1693b7.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 888.222226] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ee28349d-f2b9-4875-85e9-9316eae6d7ab {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.229902] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Waiting for the task: (returnval){ [ 888.229902] env[61629]: value = "task-1354207" [ 888.229902] env[61629]: _type = "Task" [ 888.229902] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.239116] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Task: {'id': task-1354207, 'name': Rename_Task} progress is 5%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.267624] env[61629]: DEBUG oslo_vmware.api [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354205, 'name': ReconfigVM_Task, 'duration_secs': 0.306624} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.271129] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 0d21b352-bdd0-4887-8658-cd5c448352d2/7f036972-f3d8-47df-ae86-f8f2844bf80c-rescue.vmdk or device None with type thin {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 888.272377] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f6281f-216e-449a-8b38-fd975c11652c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.305613] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5c13e26-e345-44ea-86b8-9bb952bcef54 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.321543] env[61629]: DEBUG oslo_vmware.api [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 888.321543] env[61629]: value = "task-1354208" [ 888.321543] env[61629]: _type = "Task" [ 888.321543] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.330014] env[61629]: DEBUG oslo_vmware.api [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354208, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.472915] env[61629]: DEBUG oslo_vmware.api [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354206, 'name': PowerOffVM_Task, 'duration_secs': 0.191699} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.473130] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 888.473287] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 888.474580] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-058efd2d-43a2-4790-b0c7-b5a031f7a2a2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.477061] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26eac8bd-f275-426e-8c33-ab07e3113861 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.484032] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d95bdc2-848d-49dc-94b7-7e9174cde3b7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.520330] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254f5fc7-bfd3-46c4-8d7f-d7aaada0f1f8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.531266] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52301ae3-37b5-ffb2-a66d-816a7b7d5231, 'name': SearchDatastore_Task, 'duration_secs': 0.024921} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.533436] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.533803] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 888.534013] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.534283] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.534445] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 888.534836] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3ff9dd92-1450-49e1-a923-5e2738a7ea42 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.539166] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-017aca17-a574-4d8d-a4b7-0293812dc5cd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.553877] env[61629]: DEBUG nova.compute.provider_tree [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 888.556223] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 888.556223] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 888.557593] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc1b4955-53ca-4d30-ad79-fcf7b1b8da7f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.563651] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Waiting for the task: (returnval){ [ 888.563651] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52275d54-a0ed-501c-379d-06c6c4577c2f" [ 888.563651] env[61629]: _type = "Task" [ 888.563651] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.572370] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52275d54-a0ed-501c-379d-06c6c4577c2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.606406] env[61629]: DEBUG nova.compute.manager [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 888.649440] env[61629]: DEBUG nova.network.neutron [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Successfully created port: ce03096a-81c3-496e-96ec-bb52e2ed7d48 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 888.696408] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Acquiring lock "refresh_cache-109ab664-3bb9-420e-a4a5-526277c60b96" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.696653] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Acquired lock "refresh_cache-109ab664-3bb9-420e-a4a5-526277c60b96" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.696891] env[61629]: DEBUG nova.network.neutron [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 888.743030] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Task: {'id': task-1354207, 'name': Rename_Task, 'duration_secs': 0.361622} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.743484] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 888.743689] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-263b2ddc-47dc-467c-8641-379445ada9e0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.750348] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Waiting for the task: (returnval){ [ 888.750348] env[61629]: value = "task-1354210" [ 888.750348] env[61629]: _type = "Task" [ 888.750348] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.757483] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Task: {'id': task-1354210, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.834553] env[61629]: DEBUG oslo_vmware.api [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354208, 'name': ReconfigVM_Task, 'duration_secs': 0.219303} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.834553] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 888.834648] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-937083f0-80cd-42cb-b259-152b5ea49f3b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.842306] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 888.842579] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 888.842705] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Deleting the datastore file [datastore2] 55f2d2fc-9404-422f-ba08-72e6e11a089f {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 888.843971] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-863d2046-3fe4-4e2c-8210-604be3728d0e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.846091] env[61629]: DEBUG oslo_vmware.api [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 888.846091] env[61629]: value = "task-1354211" [ 888.846091] env[61629]: _type = "Task" [ 888.846091] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.851633] env[61629]: DEBUG oslo_vmware.api [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 888.851633] env[61629]: value = "task-1354212" [ 888.851633] env[61629]: _type = "Task" [ 888.851633] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.859216] env[61629]: DEBUG oslo_vmware.api [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354211, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.865829] env[61629]: DEBUG oslo_vmware.api [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354212, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.058966] env[61629]: DEBUG nova.scheduler.client.report [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 889.074956] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52275d54-a0ed-501c-379d-06c6c4577c2f, 'name': SearchDatastore_Task, 'duration_secs': 0.023716} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.075901] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d6bc30f-bb20-417d-b313-7a593dd7355f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.085537] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Waiting for the task: (returnval){ [ 889.085537] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]523298c3-766d-2fc5-8eac-234c9fb5aa8e" [ 889.085537] env[61629]: _type = "Task" [ 889.085537] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.098141] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]523298c3-766d-2fc5-8eac-234c9fb5aa8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.262853] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Task: {'id': task-1354210, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.360553] env[61629]: DEBUG oslo_vmware.api [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354211, 'name': PowerOnVM_Task, 'duration_secs': 0.435025} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.361670] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 889.366930] env[61629]: DEBUG oslo_vmware.api [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354212, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.295259} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.368122] env[61629]: DEBUG nova.compute.manager [None req-6797f55a-3643-4748-91cc-581b7076b28a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 889.368518] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 889.368807] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 889.369193] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 889.369478] env[61629]: INFO nova.compute.manager [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Took 1.43 seconds to destroy the instance on the hypervisor. [ 889.369809] env[61629]: DEBUG oslo.service.loopingcall [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 889.370608] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f6c5404-e565-465d-8bd4-b7bbbecc6f46 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.374438] env[61629]: DEBUG nova.compute.manager [-] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 889.374438] env[61629]: DEBUG nova.network.neutron [-] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 889.428588] env[61629]: DEBUG nova.network.neutron [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 889.569806] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.978s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.576849] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.964s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.577383] env[61629]: DEBUG nova.objects.instance [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Lazy-loading 'resources' on Instance uuid 68c1e93a-2829-4764-a900-75c3479b4715 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 889.598217] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquiring lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.598329] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.598635] env[61629]: INFO nova.compute.manager [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: 
fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Shelving [ 889.600271] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]523298c3-766d-2fc5-8eac-234c9fb5aa8e, 'name': SearchDatastore_Task, 'duration_secs': 0.016237} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.600967] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.601314] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 1d451558-dbbc-4942-b739-5d4b88057a75/1d451558-dbbc-4942-b739-5d4b88057a75.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 889.601654] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09db532c-fbae-459c-b52b-aae48f8f8988 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.612387] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Waiting for the task: (returnval){ [ 889.612387] env[61629]: value = "task-1354213" [ 889.612387] env[61629]: _type = "Task" [ 889.612387] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.620740] env[61629]: DEBUG nova.compute.manager [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 889.626771] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354213, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.628753] env[61629]: INFO nova.scheduler.client.report [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Deleted allocations for instance 08cb71f4-2ebe-4694-856c-2e772f319cdf [ 889.649263] env[61629]: DEBUG nova.virt.hardware [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 889.649492] env[61629]: DEBUG nova.virt.hardware [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 889.650260] env[61629]: DEBUG nova.virt.hardware [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 889.650540] env[61629]: DEBUG nova.virt.hardware [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 889.650670] env[61629]: DEBUG nova.virt.hardware [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 889.651225] env[61629]: DEBUG nova.virt.hardware [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 889.651426] env[61629]: DEBUG nova.virt.hardware [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 889.651593] env[61629]: DEBUG nova.virt.hardware [None 
req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 889.651778] env[61629]: DEBUG nova.virt.hardware [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 889.651962] env[61629]: DEBUG nova.virt.hardware [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 889.652210] env[61629]: DEBUG nova.virt.hardware [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 889.653172] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-287c9ded-f511-472c-bb14-516c82311609 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.663141] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8bf80ee-7f5a-4847-8b6f-daa663e14efd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.705883] env[61629]: DEBUG nova.network.neutron [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Updating instance_info_cache with network_info: [{"id": "155236cd-5bf3-4503-8968-010a3af74156", "address": "fa:16:3e:7a:d2:97", "network": {"id": "c350eaaf-77d9-4ddc-976e-6e96a7da4b21", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1747811477-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "092c2c52e82042dca2ce57b5f3d7ad2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "255460d5-71d4-4bfd-87f1-acc10085db7f", "external-id": "nsx-vlan-transportzone-152", "segmentation_id": 152, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap155236cd-5b", "ovs_interfaceid": "155236cd-5bf3-4503-8968-010a3af74156", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.762569] env[61629]: DEBUG oslo_vmware.api [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 
tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Task: {'id': task-1354210, 'name': PowerOnVM_Task, 'duration_secs': 0.87204} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.762569] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 889.762569] env[61629]: INFO nova.compute.manager [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Took 9.42 seconds to spawn the instance on the hypervisor. [ 889.762569] env[61629]: DEBUG nova.compute.manager [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 889.762569] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b552bf-befb-4c39-a4df-53b7ef6faa92 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.896225] env[61629]: DEBUG nova.compute.manager [req-ef741a07-f7c9-433a-a7f3-3102f41eddd9 req-73a815d1-64db-4680-b344-0942a2b08f6a service nova] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Received event network-vif-deleted-91c99a2f-21ee-45af-9321-329b2cfb1799 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 889.896468] env[61629]: INFO nova.compute.manager [req-ef741a07-f7c9-433a-a7f3-3102f41eddd9 req-73a815d1-64db-4680-b344-0942a2b08f6a service nova] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Neutron deleted interface 91c99a2f-21ee-45af-9321-329b2cfb1799; detaching it from the instance and deleting it from the info cache [ 889.896680] env[61629]: DEBUG nova.network.neutron [req-ef741a07-f7c9-433a-a7f3-3102f41eddd9 req-73a815d1-64db-4680-b344-0942a2b08f6a service nova] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.110071] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 890.110368] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9a1df62f-c3a4-4ef0-82fa-4ab47d7b8b41 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.128687] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 890.128687] env[61629]: value = "task-1354214" [ 
890.128687] env[61629]: _type = "Task" [ 890.128687] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.139060] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354213, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.146988] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354214, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.150707] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ce6e1e9-fce9-42bf-9270-60dcfe6749d2 tempest-DeleteServersAdminTestJSON-126160695 tempest-DeleteServersAdminTestJSON-126160695-project-member] Lock "08cb71f4-2ebe-4694-856c-2e772f319cdf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.381s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.184693] env[61629]: DEBUG nova.network.neutron [-] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.209387] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Releasing lock "refresh_cache-109ab664-3bb9-420e-a4a5-526277c60b96" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.209387] env[61629]: DEBUG nova.compute.manager [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Instance network_info: |[{"id": "155236cd-5bf3-4503-8968-010a3af74156", "address": "fa:16:3e:7a:d2:97", "network": {"id": "c350eaaf-77d9-4ddc-976e-6e96a7da4b21", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1747811477-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "092c2c52e82042dca2ce57b5f3d7ad2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "255460d5-71d4-4bfd-87f1-acc10085db7f", "external-id": "nsx-vlan-transportzone-152", "segmentation_id": 152, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap155236cd-5b", "ovs_interfaceid": "155236cd-5bf3-4503-8968-010a3af74156", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 
890.209387] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:d2:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '255460d5-71d4-4bfd-87f1-acc10085db7f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '155236cd-5bf3-4503-8968-010a3af74156', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 890.217488] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Creating folder: Project (092c2c52e82042dca2ce57b5f3d7ad2e). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 890.220922] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-13248aeb-76ab-41c0-b15a-9f6905678fc5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.225274] env[61629]: DEBUG nova.compute.manager [req-775d5876-394f-49f0-ab35-01e78fca3382 req-8db19812-9327-42aa-926b-beb067ffda56 service nova] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Received event network-changed-155236cd-5bf3-4503-8968-010a3af74156 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 890.225556] env[61629]: DEBUG nova.compute.manager [req-775d5876-394f-49f0-ab35-01e78fca3382 req-8db19812-9327-42aa-926b-beb067ffda56 service nova] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Refreshing instance network info cache due to event network-changed-155236cd-5bf3-4503-8968-010a3af74156. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 890.226026] env[61629]: DEBUG oslo_concurrency.lockutils [req-775d5876-394f-49f0-ab35-01e78fca3382 req-8db19812-9327-42aa-926b-beb067ffda56 service nova] Acquiring lock "refresh_cache-109ab664-3bb9-420e-a4a5-526277c60b96" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.226026] env[61629]: DEBUG oslo_concurrency.lockutils [req-775d5876-394f-49f0-ab35-01e78fca3382 req-8db19812-9327-42aa-926b-beb067ffda56 service nova] Acquired lock "refresh_cache-109ab664-3bb9-420e-a4a5-526277c60b96" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.226275] env[61629]: DEBUG nova.network.neutron [req-775d5876-394f-49f0-ab35-01e78fca3382 req-8db19812-9327-42aa-926b-beb067ffda56 service nova] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Refreshing network info cache for port 155236cd-5bf3-4503-8968-010a3af74156 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 890.243223] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Created folder: Project (092c2c52e82042dca2ce57b5f3d7ad2e) in parent group-v288443. 
[ 890.243421] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Creating folder: Instances. Parent ref: group-v288514. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 890.244376] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a988728b-78a2-4cae-adce-3f99a11c628a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.256348] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Created folder: Instances in parent group-v288514. [ 890.257968] env[61629]: DEBUG oslo.service.loopingcall [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 890.257968] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 890.257968] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-86797a41-e3cc-4390-a83c-bc034876d29b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.291626] env[61629]: INFO nova.compute.manager [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Took 27.43 seconds to build instance. [ 890.294931] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 890.294931] env[61629]: value = "task-1354217" [ 890.294931] env[61629]: _type = "Task" [ 890.294931] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.309256] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354217, 'name': CreateVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.403642] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5c9c51f1-d2d9-4780-818a-8b68051c3ac6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.414211] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8366028-8081-400c-a433-61e1f73f7a82 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.449380] env[61629]: DEBUG nova.compute.manager [req-ef741a07-f7c9-433a-a7f3-3102f41eddd9 req-73a815d1-64db-4680-b344-0942a2b08f6a service nova] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Detach interface failed, port_id=91c99a2f-21ee-45af-9321-329b2cfb1799, reason: Instance 55f2d2fc-9404-422f-ba08-72e6e11a089f could not be found. 
{{(pid=61629) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 890.454998] env[61629]: DEBUG nova.network.neutron [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Successfully updated port: ce03096a-81c3-496e-96ec-bb52e2ed7d48 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 890.505091] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4924aa82-5d1f-4d65-827e-53119dfbd2b4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.519027] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479c73c8-d2dc-419d-bb55-8d6aac7f25cb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.558095] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46c0e50b-e39e-4457-9e60-5cf856c5c948 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.567230] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e56178bd-4660-4722-910a-9c76f742f377 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.581176] env[61629]: DEBUG nova.compute.provider_tree [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 890.629027] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354213, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.659826} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.629027] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 1d451558-dbbc-4942-b739-5d4b88057a75/1d451558-dbbc-4942-b739-5d4b88057a75.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 890.629027] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 890.629027] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3ccefa91-507d-4f4d-a0f3-5654d484e17d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.643768] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Waiting for the task: (returnval){ [ 890.643768] env[61629]: value = "task-1354218" [ 890.643768] env[61629]: _type = "Task" [ 890.643768] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.653677] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354214, 'name': PowerOffVM_Task, 'duration_secs': 0.358655} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.653677] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 890.654740] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b10f7db-7e64-480a-bbcd-86574f096d17 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.662818] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354218, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.680266] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fad4118-afc5-45fd-aa8e-48419cdeb1c0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.688946] env[61629]: INFO nova.compute.manager [-] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Took 1.31 seconds to deallocate network for instance. 
[ 890.795980] env[61629]: DEBUG oslo_concurrency.lockutils [None req-eefd61b0-d8d1-4ce8-a1a3-c90f41535930 tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Lock "12c6b03b-8295-43de-898f-a6c35f1693b7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.053s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.808020] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354217, 'name': CreateVM_Task} progress is 25%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.890290] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "da1eb7f9-7562-40c8-955b-c11f831b7bc8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.890900] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "da1eb7f9-7562-40c8-955b-c11f831b7bc8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.891137] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "da1eb7f9-7562-40c8-955b-c11f831b7bc8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.891377] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "da1eb7f9-7562-40c8-955b-c11f831b7bc8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.891560] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "da1eb7f9-7562-40c8-955b-c11f831b7bc8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.893823] env[61629]: INFO nova.compute.manager [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Terminating instance [ 890.895828] env[61629]: DEBUG nova.compute.manager [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 
da1eb7f9-7562-40c8-955b-c11f831b7bc8] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 890.896047] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 890.897013] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34801b6f-ee7e-4aeb-ac95-5b6bfcd6b2ce {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.906989] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 890.907383] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c261719-3874-49da-acb8-781c681e3603 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.913455] env[61629]: DEBUG oslo_vmware.api [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 890.913455] env[61629]: value = "task-1354219" [ 890.913455] env[61629]: _type = "Task" [ 890.913455] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.923959] env[61629]: DEBUG oslo_vmware.api [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354219, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.970288] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "refresh_cache-a42d5132-22e5-4551-80d2-fb7a55a7fa9e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.970994] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired lock "refresh_cache-a42d5132-22e5-4551-80d2-fb7a55a7fa9e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.970994] env[61629]: DEBUG nova.network.neutron [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 891.087025] env[61629]: DEBUG nova.scheduler.client.report [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 891.154929] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354218, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.196806} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.155294] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 891.156321] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed8d5877-d490-40c2-8429-6074b5e92b75 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.181486] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] 1d451558-dbbc-4942-b739-5d4b88057a75/1d451558-dbbc-4942-b739-5d4b88057a75.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 891.186019] env[61629]: DEBUG nova.network.neutron [req-775d5876-394f-49f0-ab35-01e78fca3382 req-8db19812-9327-42aa-926b-beb067ffda56 service nova] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Updated VIF entry in instance network info cache for port 155236cd-5bf3-4503-8968-010a3af74156. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 891.186019] env[61629]: DEBUG nova.network.neutron [req-775d5876-394f-49f0-ab35-01e78fca3382 req-8db19812-9327-42aa-926b-beb067ffda56 service nova] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Updating instance_info_cache with network_info: [{"id": "155236cd-5bf3-4503-8968-010a3af74156", "address": "fa:16:3e:7a:d2:97", "network": {"id": "c350eaaf-77d9-4ddc-976e-6e96a7da4b21", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1747811477-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "092c2c52e82042dca2ce57b5f3d7ad2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "255460d5-71d4-4bfd-87f1-acc10085db7f", "external-id": "nsx-vlan-transportzone-152", "segmentation_id": 152, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap155236cd-5b", "ovs_interfaceid": "155236cd-5bf3-4503-8968-010a3af74156", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.186019] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f458f267-8ba3-4fd8-b75f-daa4e71fa26b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.206480] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 
tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Creating Snapshot of the VM instance {{(pid=61629) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 891.207257] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.207725] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-baebddfa-5535-45d4-9512-32ab955ea110 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.215910] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Waiting for the task: (returnval){ [ 891.215910] env[61629]: value = "task-1354220" [ 891.215910] env[61629]: _type = "Task" [ 891.215910] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.217403] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 891.217403] env[61629]: value = "task-1354221" [ 891.217403] env[61629]: _type = "Task" [ 891.217403] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.235132] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354221, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.238194] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354220, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.307122] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354217, 'name': CreateVM_Task, 'duration_secs': 0.643835} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.307985] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 891.308900] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.309165] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.309540] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 891.309805] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97e1ba8d-c2ae-4b01-9019-c229bbcf7951 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.314871] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Waiting for the task: (returnval){ [ 891.314871] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]524a25f4-408f-762b-2994-d6b614ed0247" [ 891.314871] env[61629]: _type = "Task" [ 891.314871] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.325743] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]524a25f4-408f-762b-2994-d6b614ed0247, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.425801] env[61629]: DEBUG oslo_vmware.api [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354219, 'name': PowerOffVM_Task, 'duration_secs': 0.229738} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.427689] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 891.427689] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 891.427689] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca1d93d9-06e9-43cd-8f65-9cb7fc351e0c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.506807] env[61629]: DEBUG nova.network.neutron [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 891.565133] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 891.565394] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 891.565843] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Deleting the datastore file [datastore1] da1eb7f9-7562-40c8-955b-c11f831b7bc8 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 891.565955] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-542be35b-8b34-4290-907e-f60fa04bb2b7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.572817] env[61629]: DEBUG oslo_vmware.api [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 891.572817] env[61629]: value = "task-1354223" [ 891.572817] env[61629]: _type = "Task" [ 891.572817] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.581380] env[61629]: DEBUG oslo_vmware.api [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354223, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.592826] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.018s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.597179] env[61629]: DEBUG oslo_concurrency.lockutils [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.715s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.599133] env[61629]: INFO nova.compute.claims [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 891.609813] env[61629]: DEBUG oslo_concurrency.lockutils [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Acquiring lock "12c6b03b-8295-43de-898f-a6c35f1693b7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.609813] env[61629]: DEBUG oslo_concurrency.lockutils [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Lock "12c6b03b-8295-43de-898f-a6c35f1693b7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.609813] env[61629]: DEBUG oslo_concurrency.lockutils [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Acquiring lock "12c6b03b-8295-43de-898f-a6c35f1693b7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.609813] env[61629]: DEBUG oslo_concurrency.lockutils [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Lock "12c6b03b-8295-43de-898f-a6c35f1693b7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.609813] env[61629]: DEBUG oslo_concurrency.lockutils [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Lock "12c6b03b-8295-43de-898f-a6c35f1693b7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.612380] env[61629]: INFO nova.compute.manager [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Terminating instance [ 891.619265] env[61629]: DEBUG nova.compute.manager [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 891.619265] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 891.620316] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa481ce-6679-49c4-8e27-de8dd0431777 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.629458] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 891.631926] env[61629]: INFO nova.scheduler.client.report [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Deleted allocations for instance 68c1e93a-2829-4764-a900-75c3479b4715 [ 891.632291] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ae7875fb-6d60-4b9f-b137-d6169440dd7c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.643640] env[61629]: DEBUG oslo_vmware.api [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Waiting for the task: (returnval){ [ 891.643640] env[61629]: value = "task-1354224" [ 891.643640] env[61629]: _type = "Task" [ 891.643640] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.661322] env[61629]: DEBUG oslo_vmware.api [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Task: {'id': task-1354224, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.706493] env[61629]: DEBUG oslo_concurrency.lockutils [req-775d5876-394f-49f0-ab35-01e78fca3382 req-8db19812-9327-42aa-926b-beb067ffda56 service nova] Releasing lock "refresh_cache-109ab664-3bb9-420e-a4a5-526277c60b96" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.723767] env[61629]: DEBUG nova.network.neutron [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Updating instance_info_cache with network_info: [{"id": "ce03096a-81c3-496e-96ec-bb52e2ed7d48", "address": "fa:16:3e:cd:78:e6", "network": {"id": "03610486-2741-491e-a62d-a51579315e5a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1394073503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38efdd2cc07f45a49fb06d590aafb96b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce03096a-81", "ovs_interfaceid": "ce03096a-81c3-496e-96ec-bb52e2ed7d48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.733201] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354220, 'name': ReconfigVM_Task, 'duration_secs': 0.418867} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.737732] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Reconfigured VM instance instance-0000004e to attach disk [datastore2] 1d451558-dbbc-4942-b739-5d4b88057a75/1d451558-dbbc-4942-b739-5d4b88057a75.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 891.738934] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6663afdc-e48f-4633-bc01-6003cf0d56d9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.747301] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354221, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.752578] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Waiting for the task: (returnval){ [ 891.752578] env[61629]: value = "task-1354225" [ 891.752578] env[61629]: _type = "Task" [ 891.752578] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.766971] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354225, 'name': Rename_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.829356] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]524a25f4-408f-762b-2994-d6b614ed0247, 'name': SearchDatastore_Task, 'duration_secs': 0.011043} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.829663] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.829895] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 891.830138] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.830284] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.830595] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 891.830725] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-22a33821-c29b-46f7-80a0-7bcca96ab22c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.842159] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 891.842364] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 891.843274] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-851c3828-b216-433b-9b58-7cf808f0b6fb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.849865] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Waiting for the task: (returnval){ [ 891.849865] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52183c3e-1828-94ff-a930-1cbd751305b9" [ 891.849865] env[61629]: _type = "Task" [ 891.849865] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.868116] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52183c3e-1828-94ff-a930-1cbd751305b9, 'name': SearchDatastore_Task, 'duration_secs': 0.012209} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.869114] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15f7c500-70dd-475f-a902-d6eee52fd71c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.874829] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Waiting for the task: (returnval){ [ 891.874829] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52fd2cd4-50d3-6727-c3fc-d4d97f81010e" [ 891.874829] env[61629]: _type = "Task" [ 891.874829] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.883503] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52fd2cd4-50d3-6727-c3fc-d4d97f81010e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.082744] env[61629]: DEBUG oslo_vmware.api [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354223, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.210208} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.083306] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 892.083306] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 892.083413] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 892.083550] env[61629]: INFO nova.compute.manager [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Took 1.19 seconds to destroy the instance on the hypervisor. [ 892.083783] env[61629]: DEBUG oslo.service.loopingcall [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 892.084267] env[61629]: DEBUG nova.compute.manager [-] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 892.084267] env[61629]: DEBUG nova.network.neutron [-] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 892.142938] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ec674e7-4008-4fe3-968c-9f66955b15cd tempest-SecurityGroupsTestJSON-646677483 tempest-SecurityGroupsTestJSON-646677483-project-member] Lock "68c1e93a-2829-4764-a900-75c3479b4715" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.490s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.159345] env[61629]: DEBUG oslo_vmware.api [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Task: {'id': task-1354224, 'name': PowerOffVM_Task, 'duration_secs': 0.201884} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.159604] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 892.159808] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 892.160185] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-21058673-62a8-4e60-9991-4da1643b7bd3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.237416] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Releasing lock "refresh_cache-a42d5132-22e5-4551-80d2-fb7a55a7fa9e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.237797] env[61629]: DEBUG nova.compute.manager [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Instance network_info: |[{"id": "ce03096a-81c3-496e-96ec-bb52e2ed7d48", "address": "fa:16:3e:cd:78:e6", "network": {"id": "03610486-2741-491e-a62d-a51579315e5a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1394073503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38efdd2cc07f45a49fb06d590aafb96b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce03096a-81", "ovs_interfaceid": "ce03096a-81c3-496e-96ec-bb52e2ed7d48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 892.238258] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:78:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd88b750a-0e7d-4f16-8bd5-8e6d5743b720', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'ce03096a-81c3-496e-96ec-bb52e2ed7d48', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 892.246361] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Creating folder: Project (38efdd2cc07f45a49fb06d590aafb96b). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 892.248251] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b144e4d6-394a-466a-8b3e-a23a7888f591 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.250180] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 892.250411] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 892.250620] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Deleting the datastore file [datastore2] 12c6b03b-8295-43de-898f-a6c35f1693b7 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 892.255565] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8b369be4-705c-4294-9951-6155fb2d1a16 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.256097] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354221, 'name': CreateSnapshot_Task, 'duration_secs': 0.606725} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.259056] env[61629]: INFO nova.compute.manager [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Rescuing [ 892.259056] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "refresh_cache-87a1383f-d66b-4bde-b153-89ac62ff8390" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.259056] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquired lock "refresh_cache-87a1383f-d66b-4bde-b153-89ac62ff8390" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.259056] env[61629]: DEBUG nova.network.neutron [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 892.259717] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Created Snapshot of the VM instance {{(pid=61629) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 892.260921] env[61629]: DEBUG nova.compute.manager [req-63ee3f01-99d6-472f-a3bf-30e5fd4e9362 req-7a63c79a-dad0-4b37-9b06-c22ffb05c545 service nova] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Received event network-vif-plugged-ce03096a-81c3-496e-96ec-bb52e2ed7d48 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 892.261075] env[61629]: DEBUG oslo_concurrency.lockutils [req-63ee3f01-99d6-472f-a3bf-30e5fd4e9362 req-7a63c79a-dad0-4b37-9b06-c22ffb05c545 service nova] Acquiring lock "a42d5132-22e5-4551-80d2-fb7a55a7fa9e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.261303] env[61629]: DEBUG oslo_concurrency.lockutils [req-63ee3f01-99d6-472f-a3bf-30e5fd4e9362 req-7a63c79a-dad0-4b37-9b06-c22ffb05c545 service nova] Lock "a42d5132-22e5-4551-80d2-fb7a55a7fa9e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.261503] env[61629]: DEBUG oslo_concurrency.lockutils [req-63ee3f01-99d6-472f-a3bf-30e5fd4e9362 req-7a63c79a-dad0-4b37-9b06-c22ffb05c545 service nova] Lock "a42d5132-22e5-4551-80d2-fb7a55a7fa9e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.262213] env[61629]: DEBUG nova.compute.manager 
[req-63ee3f01-99d6-472f-a3bf-30e5fd4e9362 req-7a63c79a-dad0-4b37-9b06-c22ffb05c545 service nova] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] No waiting events found dispatching network-vif-plugged-ce03096a-81c3-496e-96ec-bb52e2ed7d48 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 892.262213] env[61629]: WARNING nova.compute.manager [req-63ee3f01-99d6-472f-a3bf-30e5fd4e9362 req-7a63c79a-dad0-4b37-9b06-c22ffb05c545 service nova] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Received unexpected event network-vif-plugged-ce03096a-81c3-496e-96ec-bb52e2ed7d48 for instance with vm_state building and task_state spawning. [ 892.262213] env[61629]: DEBUG nova.compute.manager [req-63ee3f01-99d6-472f-a3bf-30e5fd4e9362 req-7a63c79a-dad0-4b37-9b06-c22ffb05c545 service nova] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Received event network-changed-ce03096a-81c3-496e-96ec-bb52e2ed7d48 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 892.262213] env[61629]: DEBUG nova.compute.manager [req-63ee3f01-99d6-472f-a3bf-30e5fd4e9362 req-7a63c79a-dad0-4b37-9b06-c22ffb05c545 service nova] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Refreshing instance network info cache due to event network-changed-ce03096a-81c3-496e-96ec-bb52e2ed7d48. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 892.262438] env[61629]: DEBUG oslo_concurrency.lockutils [req-63ee3f01-99d6-472f-a3bf-30e5fd4e9362 req-7a63c79a-dad0-4b37-9b06-c22ffb05c545 service nova] Acquiring lock "refresh_cache-a42d5132-22e5-4551-80d2-fb7a55a7fa9e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.262609] env[61629]: DEBUG oslo_concurrency.lockutils [req-63ee3f01-99d6-472f-a3bf-30e5fd4e9362 req-7a63c79a-dad0-4b37-9b06-c22ffb05c545 service nova] Acquired lock "refresh_cache-a42d5132-22e5-4551-80d2-fb7a55a7fa9e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.262794] env[61629]: DEBUG nova.network.neutron [req-63ee3f01-99d6-472f-a3bf-30e5fd4e9362 req-7a63c79a-dad0-4b37-9b06-c22ffb05c545 service nova] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Refreshing network info cache for port ce03096a-81c3-496e-96ec-bb52e2ed7d48 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 892.269270] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b45d55-ca15-4670-aa28-2694a0061126 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.272266] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Created folder: Project (38efdd2cc07f45a49fb06d590aafb96b) in parent group-v288443. [ 892.272266] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Creating folder: Instances. Parent ref: group-v288518. 
{{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 892.274024] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3777b0c1-9b86-46c7-bb64-74233693cf4b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.276234] env[61629]: DEBUG oslo_vmware.api [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Waiting for the task: (returnval){ [ 892.276234] env[61629]: value = "task-1354228" [ 892.276234] env[61629]: _type = "Task" [ 892.276234] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.289652] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354225, 'name': Rename_Task, 'duration_secs': 0.186433} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.291213] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 892.291530] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Created folder: Instances in parent group-v288518. [ 892.291769] env[61629]: DEBUG oslo.service.loopingcall [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 892.291981] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61422c6d-ddd8-4c38-bd45-5fb2ed04622f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.293601] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 892.296480] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6200c57a-bc33-48dc-b991-c9956cc9f816 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.311986] env[61629]: DEBUG oslo_vmware.api [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Task: {'id': task-1354228, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.317597] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Waiting for the task: (returnval){ [ 892.317597] env[61629]: value = "task-1354230" [ 892.317597] env[61629]: _type = "Task" [ 892.317597] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.318849] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 892.318849] env[61629]: value = "task-1354231" [ 892.318849] env[61629]: _type = "Task" [ 892.318849] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.332769] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354230, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.332958] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354231, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.385072] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52fd2cd4-50d3-6727-c3fc-d4d97f81010e, 'name': SearchDatastore_Task, 'duration_secs': 0.010123} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.385369] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.385753] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 109ab664-3bb9-420e-a4a5-526277c60b96/109ab664-3bb9-420e-a4a5-526277c60b96.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 892.386036] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba09a90b-f217-42f6-9804-184d9c1a39bc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.394334] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Waiting for the task: (returnval){ [ 892.394334] env[61629]: value = "task-1354232" [ 892.394334] env[61629]: _type = "Task" [ 892.394334] env[61629]: } to complete. 
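The "Waiting for the task"/"progress is N%" pairs above are oslo.vmware's task poller at work: a vCenter call returns a Task managed object, and wait_for_task() polls it until it succeeds or fails. A minimal sketch, assuming an already-created oslo_vmware.api.VMwareAPISession and a VM managed-object reference obtained elsewhere:

    def power_on(session, vm_ref):
        # Invoke the vCenter method; this returns a Task reference, not a result.
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
        # Blocks while polling the task (the "progress is N%" lines) and
        # raises if the task finishes in an error state.
        return session.wait_for_task(task)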
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.404602] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Task: {'id': task-1354232, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.802562] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Creating linked-clone VM from snapshot {{(pid=61629) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 892.803023] env[61629]: DEBUG oslo_vmware.api [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Task: {'id': task-1354228, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.315344} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.807576] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a0b80e63-377f-4b88-889f-7bdb7e185378 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.811727] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 892.811979] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 892.812243] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 892.816026] env[61629]: INFO nova.compute.manager [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Took 1.19 seconds to destroy the instance on the hypervisor. [ 892.816026] env[61629]: DEBUG oslo.service.loopingcall [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
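The "Waiting for function ... to return" entries come from oslo.service's looping-call helpers, which Nova uses here to run a step (create_vm, the network deallocation retries) and block until it signals completion. A generic usage sketch, not the exact call site; the readiness check is a stand-in:

    import itertools

    from oslo_service import loopingcall

    _attempts = itertools.count()


    def _poll():
        # Stand-in for a real readiness check (task state, VM state, ...).
        if next(_attempts) >= 3:
            # Stops the loop; retvalue is handed back through wait().
            raise loopingcall.LoopingCallDone(retvalue="ready")


    result = loopingcall.FixedIntervalLoopingCall(_poll).start(interval=0.5).wait()
    print(result)  # "ready"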
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 892.816026] env[61629]: DEBUG nova.compute.manager [-] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 892.816026] env[61629]: DEBUG nova.network.neutron [-] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 892.821842] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 892.821842] env[61629]: value = "task-1354233" [ 892.821842] env[61629]: _type = "Task" [ 892.821842] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.843967] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354230, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.849283] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354233, 'name': CloneVM_Task} progress is 10%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.849517] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354231, 'name': CreateVM_Task} progress is 99%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.912544] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Task: {'id': task-1354232, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.006694] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ae0076-5471-4f66-9b35-22cdd7ae0aef {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.014800] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc502cb-9fca-418f-8728-4b6c1d6b0f1e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.048374] env[61629]: DEBUG nova.network.neutron [-] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.055692] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df06c93e-0fe5-425b-abd4-878dea74357b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.065278] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db75b31-f6e6-4948-95c1-ad2ddd60d4ca {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.081329] env[61629]: DEBUG nova.compute.provider_tree [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 893.336332] env[61629]: DEBUG oslo_vmware.api [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354230, 'name': PowerOnVM_Task, 'duration_secs': 0.912924} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.337725] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 893.337951] env[61629]: INFO nova.compute.manager [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Took 10.49 seconds to spawn the instance on the hypervisor. 
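Several tempest requests are interleaved through this stretch (instance builds, a delete, a linked-clone snapshot, image-cache disk copies), each carrying its own req- identifier. Pulling a single request's entries out of a saved copy of the log makes one flow readable; the file name below is an assumption:

    # Print every entry that mentions one request ID.
    req_id = "req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2"

    with open("nova-compute.log") as fh:  # assumed local copy of this log
        for line in fh:
            if req_id in line:
                print(line.rstrip())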
[ 893.338146] env[61629]: DEBUG nova.compute.manager [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 893.340140] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f515f800-abc1-454e-8aa6-e6d28cb14cb3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.348737] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354231, 'name': CreateVM_Task, 'duration_secs': 0.570804} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.349019] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354233, 'name': CloneVM_Task} progress is 94%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.349606] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 893.350372] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.351730] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.351730] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 893.351730] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-464e66a5-ee5e-4067-9995-62c1406849d3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.360493] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 893.360493] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5207fdd5-305e-0f6b-473e-8056576a67b2" [ 893.360493] env[61629]: _type = "Task" [ 893.360493] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.369065] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5207fdd5-305e-0f6b-473e-8056576a67b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.406072] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Task: {'id': task-1354232, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.722199} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.406072] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 109ab664-3bb9-420e-a4a5-526277c60b96/109ab664-3bb9-420e-a4a5-526277c60b96.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 893.406072] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 893.406072] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-07788056-f7e1-4e1c-bb19-1cfac8c9dd6f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.415845] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Waiting for the task: (returnval){ [ 893.415845] env[61629]: value = "task-1354234" [ 893.415845] env[61629]: _type = "Task" [ 893.415845] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.424777] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Task: {'id': task-1354234, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.464066] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Acquiring lock "cd165a78-21f9-4fc7-88e5-5ab35047eacc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.464066] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Lock "cd165a78-21f9-4fc7-88e5-5ab35047eacc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.518907] env[61629]: DEBUG nova.network.neutron [req-63ee3f01-99d6-472f-a3bf-30e5fd4e9362 req-7a63c79a-dad0-4b37-9b06-c22ffb05c545 service nova] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Updated VIF entry in instance network info cache for port ce03096a-81c3-496e-96ec-bb52e2ed7d48. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 893.519351] env[61629]: DEBUG nova.network.neutron [req-63ee3f01-99d6-472f-a3bf-30e5fd4e9362 req-7a63c79a-dad0-4b37-9b06-c22ffb05c545 service nova] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Updating instance_info_cache with network_info: [{"id": "ce03096a-81c3-496e-96ec-bb52e2ed7d48", "address": "fa:16:3e:cd:78:e6", "network": {"id": "03610486-2741-491e-a62d-a51579315e5a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1394073503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38efdd2cc07f45a49fb06d590aafb96b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce03096a-81", "ovs_interfaceid": "ce03096a-81c3-496e-96ec-bb52e2ed7d48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.524637] env[61629]: DEBUG nova.network.neutron [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Updating instance_info_cache with network_info: [{"id": "bff06c9b-54d2-4109-b2de-70fbab2c58d4", "address": "fa:16:3e:7c:c6:f7", "network": {"id": "a1fb78c4-7c5c-4692-86e0-3111b87b44c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1355821875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87909880104e4519b42cb204f366af3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbff06c9b-54", "ovs_interfaceid": "bff06c9b-54d2-4109-b2de-70fbab2c58d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.555218] env[61629]: INFO nova.compute.manager [-] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Took 1.47 seconds to deallocate network for instance. [ 893.584520] env[61629]: DEBUG nova.scheduler.client.report [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 893.634774] env[61629]: DEBUG nova.compute.manager [req-4c6ba48d-c64b-41e8-871f-a2df92da66fe req-dbf9dea3-c422-40f2-afbf-b7d283bd59cf service nova] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Received event network-vif-deleted-75a8ffd8-5a4a-4846-8213-980da8efc581 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 893.638020] env[61629]: INFO nova.compute.manager [req-4c6ba48d-c64b-41e8-871f-a2df92da66fe req-dbf9dea3-c422-40f2-afbf-b7d283bd59cf service nova] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Neutron deleted interface 75a8ffd8-5a4a-4846-8213-980da8efc581; detaching it from the instance and deleting it from the info cache [ 893.638020] env[61629]: DEBUG nova.network.neutron [req-4c6ba48d-c64b-41e8-871f-a2df92da66fe req-dbf9dea3-c422-40f2-afbf-b7d283bd59cf service nova] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.758084] env[61629]: DEBUG nova.network.neutron [-] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.838204] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354233, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.872831] env[61629]: INFO nova.compute.manager [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Took 30.12 seconds to build instance. [ 893.878519] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5207fdd5-305e-0f6b-473e-8056576a67b2, 'name': SearchDatastore_Task, 'duration_secs': 0.028325} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.879075] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.879293] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 893.879544] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.879695] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.879874] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 893.883211] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8713db8-575a-4621-bf6e-fd0eaedadf34 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.895812] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 893.895812] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None 
req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 893.896199] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c66a00e-ca3c-45e7-b8d6-6b2ce14b5303 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.902677] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 893.902677] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]528caa53-9e85-5b35-8f58-12ce8e4086c3" [ 893.902677] env[61629]: _type = "Task" [ 893.902677] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.912042] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]528caa53-9e85-5b35-8f58-12ce8e4086c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.924331] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Task: {'id': task-1354234, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.358665} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.925040] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 893.925595] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e74a695f-bc63-4796-aabf-31bbbbead43c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.956017] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 109ab664-3bb9-420e-a4a5-526277c60b96/109ab664-3bb9-420e-a4a5-526277c60b96.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 893.956017] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29142da5-7609-49a8-8007-e4fbcc5441e0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.969517] env[61629]: DEBUG nova.compute.manager [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 893.983144] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Waiting for the task: (returnval){ [ 893.983144] env[61629]: value = "task-1354235" [ 893.983144] env[61629]: _type = "Task" [ 893.983144] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.993812] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Task: {'id': task-1354235, 'name': ReconfigVM_Task} progress is 14%. 
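The root-disk extend that completes above was issued with a target of 1048576; in these VirtualDiskManager calls the value appears to be expressed in KB, which would make this a 1 GiB root disk from a small flavor. The conversion, for reference:

    # 1 GiB root disk expressed in KB, matching the 1048576 target above
    # (assuming the extend target is in KB, as the vmwareapi driver uses).
    root_gb = 1
    size_kb = root_gb * 1024 * 1024
    assert size_kb == 1048576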
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.022634] env[61629]: DEBUG oslo_concurrency.lockutils [req-63ee3f01-99d6-472f-a3bf-30e5fd4e9362 req-7a63c79a-dad0-4b37-9b06-c22ffb05c545 service nova] Releasing lock "refresh_cache-a42d5132-22e5-4551-80d2-fb7a55a7fa9e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.030716] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Releasing lock "refresh_cache-87a1383f-d66b-4bde-b153-89ac62ff8390" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.066340] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.089362] env[61629]: DEBUG oslo_concurrency.lockutils [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.494s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.089727] env[61629]: DEBUG nova.compute.manager [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 894.092899] env[61629]: DEBUG oslo_concurrency.lockutils [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.658s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.094958] env[61629]: INFO nova.compute.claims [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 894.139202] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-661cb788-edfe-4eef-a9e6-fed1c5ce8c48 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.151652] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dfe830c-cd14-4c4a-8273-00972b931b62 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.182769] env[61629]: DEBUG nova.compute.manager [req-4c6ba48d-c64b-41e8-871f-a2df92da66fe req-dbf9dea3-c422-40f2-afbf-b7d283bd59cf service nova] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Detach interface failed, port_id=75a8ffd8-5a4a-4846-8213-980da8efc581, reason: Instance 12c6b03b-8295-43de-898f-a6c35f1693b7 could not be found. {{(pid=61629) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 894.260591] env[61629]: INFO nova.compute.manager [-] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Took 1.45 seconds to deallocate network for instance. [ 894.337941] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354233, 'name': CloneVM_Task} progress is 94%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.380424] env[61629]: DEBUG oslo_concurrency.lockutils [None req-bbe9ea64-3aec-4a7c-97b0-4941561eb82b tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "1d451558-dbbc-4942-b739-5d4b88057a75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.120s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.416275] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]528caa53-9e85-5b35-8f58-12ce8e4086c3, 'name': SearchDatastore_Task, 'duration_secs': 0.01338} completed successfully. 
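The resource claim above is checked against the inventory reported a little earlier for provider d075eff1-6f77-44a8-824e-16f3e03b4063; usable capacity per resource class is (total - reserved) * allocation_ratio. Checking those numbers:

    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0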
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.416275] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-555c4c9a-26fc-41fc-97c4-2b29b5d5bbe8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.421444] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 894.421444] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52fc8e58-c00c-ea25-1577-9602e565ba6c" [ 894.421444] env[61629]: _type = "Task" [ 894.421444] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.430306] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52fc8e58-c00c-ea25-1577-9602e565ba6c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.451660] env[61629]: DEBUG nova.compute.manager [req-5962c2f1-2ade-4009-b652-93587d828262 req-a9074bff-30e0-467b-a013-6586a42a313b service nova] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Received event network-vif-deleted-f083b4ff-bb03-4d2c-90b7-524af188ccb0 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 894.492752] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Task: {'id': task-1354235, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.501678] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.570874] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 894.571403] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a3ec332-6747-49aa-9691-87b5bbc298d6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.579015] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 894.579015] env[61629]: value = "task-1354236" [ 894.579015] env[61629]: _type = "Task" [ 894.579015] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.588456] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354236, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.594948] env[61629]: DEBUG nova.compute.utils [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 894.598459] env[61629]: DEBUG nova.compute.manager [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 894.598459] env[61629]: DEBUG nova.network.neutron [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 894.725036] env[61629]: DEBUG nova.policy [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bdffb48ef3e14d7994bb9709b1ce3987', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a35cec60cf464a1c9f8215dbc6403a84', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 894.767957] env[61629]: DEBUG oslo_concurrency.lockutils [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.842353] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354233, 'name': CloneVM_Task} progress is 100%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.935127] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52fc8e58-c00c-ea25-1577-9602e565ba6c, 'name': SearchDatastore_Task, 'duration_secs': 0.025666} completed successfully. 
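The image-cache searches and disk copies in this stretch follow the datastore path convention visible throughout the log: cached images live under devstack-image-cache_base keyed by image ID, and each instance gets a directory named after its UUID. A small helper that just reproduces that naming (hypothetical, for illustration only):

    def cached_image_path(datastore, image_id):
        # e.g. "[datastore1] devstack-image-cache_base/<image>/<image>.vmdk"
        return (f"[{datastore}] devstack-image-cache_base/"
                f"{image_id}/{image_id}.vmdk")


    def instance_disk_path(datastore, instance_uuid):
        # e.g. "[datastore1] <uuid>/<uuid>.vmdk"
        return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"


    src = cached_image_path("datastore1", "7f036972-f3d8-47df-ae86-f8f2844bf80c")
    dst = instance_disk_path("datastore1", "a42d5132-22e5-4551-80d2-fb7a55a7fa9e")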
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.935127] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.935283] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] a42d5132-22e5-4551-80d2-fb7a55a7fa9e/a42d5132-22e5-4551-80d2-fb7a55a7fa9e.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 894.935572] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2248a560-438a-4b4d-acb5-aa1eedab6edc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.943133] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 894.943133] env[61629]: value = "task-1354237" [ 894.943133] env[61629]: _type = "Task" [ 894.943133] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.952723] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354237, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.994573] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Task: {'id': task-1354235, 'name': ReconfigVM_Task, 'duration_secs': 0.710388} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.999019] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 109ab664-3bb9-420e-a4a5-526277c60b96/109ab664-3bb9-420e-a4a5-526277c60b96.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 894.999019] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8090949-6a21-45c2-a4f9-b776f5a6c900 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.003346] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Waiting for the task: (returnval){ [ 895.003346] env[61629]: value = "task-1354238" [ 895.003346] env[61629]: _type = "Task" [ 895.003346] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.016853] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Task: {'id': task-1354238, 'name': Rename_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.033299] env[61629]: DEBUG oslo_concurrency.lockutils [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.033447] env[61629]: DEBUG oslo_concurrency.lockutils [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.091768] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354236, 'name': PowerOffVM_Task, 'duration_secs': 0.473816} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.092082] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 895.092919] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f207dc61-a281-4bdc-afc0-416f4d02a348 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.101698] env[61629]: DEBUG nova.compute.manager [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 895.127187] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e48fffe-f302-4cac-9f9c-0542512a7544 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.170452] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 895.171000] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-99c8c073-3893-4e6c-8003-d5970d80df45 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.180759] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 895.180759] env[61629]: value = "task-1354239" [ 895.180759] env[61629]: _type = "Task" [ 895.180759] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.198606] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] VM already powered off {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 895.198866] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 895.199156] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.199315] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.199509] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 895.200016] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3cbaa1e3-70e0-43df-ac2b-57754362d1e2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.213077] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 895.213077] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 895.214625] env[61629]: DEBUG nova.network.neutron [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Successfully created port: 5bb08edd-3639-401f-9e54-26abd98b246e {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 895.218152] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2822a74-1d88-4dee-935a-8671d0c28f0e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.235747] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 895.235747] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]526531e2-dc47-24d6-596e-ac974bd2b498" [ 895.235747] env[61629]: _type = "Task" [ 895.235747] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.244815] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]526531e2-dc47-24d6-596e-ac974bd2b498, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.290671] env[61629]: DEBUG nova.compute.manager [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 895.291854] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aacc96e-a53a-4fbf-870d-bc553b986c49 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.346201] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354233, 'name': CloneVM_Task, 'duration_secs': 2.048267} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.346201] env[61629]: INFO nova.virt.vmwareapi.vmops [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Created linked-clone VM from snapshot [ 895.346528] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d13843-5995-4b1a-ac80-06785f37215a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.356342] env[61629]: DEBUG nova.virt.vmwareapi.images [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Uploading image 1e2172a0-89df-4cab-a61f-a1c2288e9094 {{(pid=61629) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 895.385795] env[61629]: DEBUG oslo_vmware.rw_handles [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 895.385795] env[61629]: value = "vm-288521" [ 895.385795] env[61629]: _type = "VirtualMachine" [ 895.385795] env[61629]: }. {{(pid=61629) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 895.386150] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2cec69fc-610d-46c8-9ad9-bb8da84b2a5c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.399162] env[61629]: DEBUG oslo_vmware.rw_handles [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lease: (returnval){ [ 895.399162] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5259d018-06aa-8ee3-b656-b5291f80af9b" [ 895.399162] env[61629]: _type = "HttpNfcLease" [ 895.399162] env[61629]: } obtained for exporting VM: (result){ [ 895.399162] env[61629]: value = "vm-288521" [ 895.399162] env[61629]: _type = "VirtualMachine" [ 895.399162] env[61629]: }. {{(pid=61629) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 895.399843] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the lease: (returnval){ [ 895.399843] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5259d018-06aa-8ee3-b656-b5291f80af9b" [ 895.399843] env[61629]: _type = "HttpNfcLease" [ 895.399843] env[61629]: } to be ready. {{(pid=61629) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 895.414313] env[61629]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 895.414313] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5259d018-06aa-8ee3-b656-b5291f80af9b" [ 895.414313] env[61629]: _type = "HttpNfcLease" [ 895.414313] env[61629]: } is initializing. 
{{(pid=61629) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 895.459523] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354237, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.518638] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Task: {'id': task-1354238, 'name': Rename_Task, 'duration_secs': 0.162675} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.518731] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 895.519606] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1aa1e51b-5aa9-48f6-a94a-4d5b557b19b5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.528176] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Waiting for the task: (returnval){ [ 895.528176] env[61629]: value = "task-1354241" [ 895.528176] env[61629]: _type = "Task" [ 895.528176] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.538321] env[61629]: DEBUG nova.compute.manager [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 895.541660] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Task: {'id': task-1354241, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.555933] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d9fd32f-5720-40ba-9de0-4630a8c3f828 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.569980] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbf6c0bf-086e-410b-a904-7c5b8db13e13 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.606775] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f3878b1-89e4-429c-a181-89eee92734ba {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.616139] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d6645c-2a7c-49d1-9b98-461daa208298 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.637438] env[61629]: DEBUG nova.compute.provider_tree [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 895.749877] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]526531e2-dc47-24d6-596e-ac974bd2b498, 'name': SearchDatastore_Task, 'duration_secs': 0.020253} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.749877] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25706301-0564-4bad-99fb-1b4a0b8a7e1b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.755785] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 895.755785] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52529b13-6ecc-1c72-746c-a897a9ac6cab" [ 895.755785] env[61629]: _type = "Task" [ 895.755785] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.762483] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52529b13-6ecc-1c72-746c-a897a9ac6cab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.808036] env[61629]: INFO nova.compute.manager [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] instance snapshotting [ 895.811214] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c84ce4e0-18a9-41de-952d-9a62d4683440 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.831380] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8413040-45a7-4a4e-9297-a34287adef6a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.910570] env[61629]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 895.910570] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5259d018-06aa-8ee3-b656-b5291f80af9b" [ 895.910570] env[61629]: _type = "HttpNfcLease" [ 895.910570] env[61629]: } is ready. {{(pid=61629) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 895.911350] env[61629]: DEBUG oslo_vmware.rw_handles [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 895.911350] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5259d018-06aa-8ee3-b656-b5291f80af9b" [ 895.911350] env[61629]: _type = "HttpNfcLease" [ 895.911350] env[61629]: }. {{(pid=61629) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 895.911981] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99733a72-a38f-4f45-a525-a824fb68c4c5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.925425] env[61629]: DEBUG oslo_vmware.rw_handles [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c0e2b5-70a5-6ed5-686a-8b4ad3c90313/disk-0.vmdk from lease info. {{(pid=61629) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 895.925425] env[61629]: DEBUG oslo_vmware.rw_handles [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c0e2b5-70a5-6ed5-686a-8b4ad3c90313/disk-0.vmdk for reading. {{(pid=61629) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 895.992376] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354237, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.624327} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.992690] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] a42d5132-22e5-4551-80d2-fb7a55a7fa9e/a42d5132-22e5-4551-80d2-fb7a55a7fa9e.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 895.992937] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 895.993224] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1aec86e2-e8da-441c-bfa5-868453739014 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.000092] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 896.000092] env[61629]: value = "task-1354242" [ 896.000092] env[61629]: _type = "Task" [ 896.000092] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.011135] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354242, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.037818] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Task: {'id': task-1354241, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.040646] env[61629]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e2d0bd04-4df8-49d5-b641-66c196563ae8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.064865] env[61629]: DEBUG oslo_concurrency.lockutils [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.137904] env[61629]: DEBUG nova.compute.manager [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 896.140916] env[61629]: DEBUG nova.scheduler.client.report [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 896.182648] env[61629]: DEBUG nova.virt.hardware [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 896.184328] env[61629]: DEBUG nova.virt.hardware [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 896.184328] env[61629]: DEBUG nova.virt.hardware [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 896.187288] env[61629]: DEBUG nova.virt.hardware [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 896.187505] env[61629]: DEBUG nova.virt.hardware [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 896.187681] env[61629]: DEBUG nova.virt.hardware [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 896.187912] 
env[61629]: DEBUG nova.virt.hardware [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 896.188092] env[61629]: DEBUG nova.virt.hardware [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 896.188264] env[61629]: DEBUG nova.virt.hardware [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 896.188429] env[61629]: DEBUG nova.virt.hardware [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 896.188605] env[61629]: DEBUG nova.virt.hardware [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 896.190088] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c271825-baa7-4aaa-9ff7-13e1a917a32f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.200803] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd6ce2f-d318-48d1-9e7a-ae92e0e3c1e5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.265710] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52529b13-6ecc-1c72-746c-a897a9ac6cab, 'name': SearchDatastore_Task, 'duration_secs': 0.014074} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.265997] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.266284] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 87a1383f-d66b-4bde-b153-89ac62ff8390/7f036972-f3d8-47df-ae86-f8f2844bf80c-rescue.vmdk. {{(pid=61629) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 896.266559] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dad793b4-3df4-4e55-a4c7-903f4fb9a704 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.276441] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 896.276441] env[61629]: value = "task-1354243" [ 896.276441] env[61629]: _type = "Task" [ 896.276441] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.283762] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354243, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.343440] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Creating Snapshot of the VM instance {{(pid=61629) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 896.343796] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-cb24880e-bd92-4027-bea2-70f426025b71 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.351143] env[61629]: DEBUG oslo_vmware.api [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Waiting for the task: (returnval){ [ 896.351143] env[61629]: value = "task-1354244" [ 896.351143] env[61629]: _type = "Task" [ 896.351143] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.360557] env[61629]: DEBUG oslo_vmware.api [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354244, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.514767] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354242, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104715} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.515356] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 896.516981] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c55fb8-0567-43a0-a7ee-b2ed8dca1b66 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.543674] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] a42d5132-22e5-4551-80d2-fb7a55a7fa9e/a42d5132-22e5-4551-80d2-fb7a55a7fa9e.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 896.547859] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1857d9d4-8d7d-4842-8a70-e16544e3f782 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.573569] env[61629]: DEBUG oslo_vmware.api [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Task: {'id': task-1354241, 'name': PowerOnVM_Task, 'duration_secs': 0.690889} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.575206] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 896.575305] env[61629]: INFO nova.compute.manager [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Took 9.39 seconds to spawn the instance on the hypervisor. 
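The wait_for_task / _poll_task entries above (for example task-1354241 PowerOnVM_Task moving from 0% to 66% to "completed successfully") all follow the same poll-until-done pattern: a vCenter task is issued, then its progress is re-read until it reaches a terminal state. The short sketch below is a simplified, self-contained illustration of that pattern only; it is not oslo.vmware's actual implementation, and the callable name fetch_task_info, the TaskTimeout class, and the interval/timeout defaults are placeholders.

import time

class TaskTimeout(Exception):
    """Raised when a task does not reach a terminal state in time."""

def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a task until it succeeds or fails.

    fetch_task_info: callable returning a dict such as
        {"state": "running", "progress": 66},
        {"state": "success"}, or
        {"state": "error", "message": "..."}.
    Mirrors the "progress is N%" / "completed successfully" lines in
    this log, but as a stand-in, not the oslo.vmware code path.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info()
        state = info.get("state")
        if state == "success":
            return info
        if state == "error":
            raise RuntimeError(info.get("message", "task failed"))
        # Still queued or running: report progress and poll again.
        print("progress is %s%%" % info.get("progress", 0))
        time.sleep(poll_interval)
    raise TaskTimeout("task did not complete within %.0fs" % timeout)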
[ 896.575504] env[61629]: DEBUG nova.compute.manager [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 896.575849] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 896.575849] env[61629]: value = "task-1354245" [ 896.575849] env[61629]: _type = "Task" [ 896.575849] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.577313] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36df7b45-9ba8-4310-997d-6332084f5630 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.595285] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354245, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.646249] env[61629]: DEBUG oslo_concurrency.lockutils [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.552s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.646249] env[61629]: DEBUG nova.compute.manager [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 896.649141] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.714s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.651610] env[61629]: INFO nova.compute.claims [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 896.786054] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354243, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.865209] env[61629]: DEBUG oslo_vmware.api [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354244, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.088228] env[61629]: DEBUG nova.compute.manager [req-73db42a4-1388-495a-8395-d764523f4212 req-f339d729-84c8-4c62-bbf0-3f30624746a4 service nova] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Received event network-vif-plugged-5bb08edd-3639-401f-9e54-26abd98b246e {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 897.088868] env[61629]: DEBUG oslo_concurrency.lockutils [req-73db42a4-1388-495a-8395-d764523f4212 req-f339d729-84c8-4c62-bbf0-3f30624746a4 service nova] Acquiring lock "274e3437-eacd-4299-9c27-97bbb0ebf1c1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.089126] env[61629]: DEBUG oslo_concurrency.lockutils [req-73db42a4-1388-495a-8395-d764523f4212 req-f339d729-84c8-4c62-bbf0-3f30624746a4 service nova] Lock "274e3437-eacd-4299-9c27-97bbb0ebf1c1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.091560] env[61629]: DEBUG oslo_concurrency.lockutils [req-73db42a4-1388-495a-8395-d764523f4212 req-f339d729-84c8-4c62-bbf0-3f30624746a4 service nova] Lock "274e3437-eacd-4299-9c27-97bbb0ebf1c1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.091560] env[61629]: DEBUG nova.compute.manager [req-73db42a4-1388-495a-8395-d764523f4212 req-f339d729-84c8-4c62-bbf0-3f30624746a4 service nova] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] No waiting events found dispatching network-vif-plugged-5bb08edd-3639-401f-9e54-26abd98b246e {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 897.091560] env[61629]: WARNING nova.compute.manager [req-73db42a4-1388-495a-8395-d764523f4212 req-f339d729-84c8-4c62-bbf0-3f30624746a4 service nova] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Received unexpected event network-vif-plugged-5bb08edd-3639-401f-9e54-26abd98b246e for instance with vm_state building and task_state spawning. [ 897.094494] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354245, 'name': ReconfigVM_Task, 'duration_secs': 0.507879} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.095187] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Reconfigured VM instance instance-00000050 to attach disk [datastore1] a42d5132-22e5-4551-80d2-fb7a55a7fa9e/a42d5132-22e5-4551-80d2-fb7a55a7fa9e.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 897.095998] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a1542e50-2f94-4e09-883d-5c854ca7994a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.108388] env[61629]: INFO nova.compute.manager [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Took 28.54 seconds to build instance. [ 897.113507] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 897.113507] env[61629]: value = "task-1354246" [ 897.113507] env[61629]: _type = "Task" [ 897.113507] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.124042] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354246, 'name': Rename_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.158791] env[61629]: DEBUG nova.compute.utils [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 897.160621] env[61629]: DEBUG nova.compute.manager [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 897.160950] env[61629]: DEBUG nova.network.neutron [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 897.275303] env[61629]: DEBUG nova.policy [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be81178f7a914988a54581c283e2e76a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6d1f876ee054beb89ca0eb0776ddcd5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 897.290593] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354243, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.661679} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.292437] env[61629]: INFO nova.virt.vmwareapi.ds_util [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 87a1383f-d66b-4bde-b153-89ac62ff8390/7f036972-f3d8-47df-ae86-f8f2844bf80c-rescue.vmdk. 
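Several entries in this section are oslo.concurrency lock bookkeeping: Acquiring/Acquired/Releasing lock "[datastore2] devstack-image-cache_base/....vmdk" around the image-cache copy, and the "compute_resources" lock around resource claims. A minimal sketch of how that locking API is typically used follows; lockutils.lock and lockutils.synchronized are real oslo.concurrency calls, while the function bodies and the datastore_path lock name are hypothetical examples.

from oslo_concurrency import lockutils

def refresh_cached_image(datastore_path):
    # Context-manager form: the critical section runs under a named
    # lock, which is what produces "Acquiring/Acquired/Releasing lock"
    # DEBUG lines like the ones in this log.
    with lockutils.lock(datastore_path):
        # fetch-or-reuse logic for the cached VMDK would go here
        pass

@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    # Decorator form: the whole function body holds the named lock,
    # as the resource tracker does for instance claims.
    return instance_uuid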
[ 897.293897] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a72546a8-66cf-4931-985c-62e3a493f7e8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.322890] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] 87a1383f-d66b-4bde-b153-89ac62ff8390/7f036972-f3d8-47df-ae86-f8f2844bf80c-rescue.vmdk or device None with type thin {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 897.326340] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35270975-585f-4211-a63e-c685e7e9ff49 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.345466] env[61629]: DEBUG nova.network.neutron [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Successfully updated port: 5bb08edd-3639-401f-9e54-26abd98b246e {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 897.352889] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 897.352889] env[61629]: value = "task-1354247" [ 897.352889] env[61629]: _type = "Task" [ 897.352889] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.367743] env[61629]: DEBUG oslo_vmware.api [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354244, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.373099] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354247, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.611085] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a18986e-b7c7-4c85-b805-9f88abc9e6b6 tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Lock "109ab664-3bb9-420e-a4a5-526277c60b96" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.108s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.623116] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354246, 'name': Rename_Task, 'duration_secs': 0.305856} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.623643] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 897.623994] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2313b08d-698b-4ba3-8491-5da088de3b95 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.631917] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 897.631917] env[61629]: value = "task-1354248" [ 897.631917] env[61629]: _type = "Task" [ 897.631917] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.641500] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354248, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.666403] env[61629]: DEBUG nova.compute.manager [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 897.780406] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Acquiring lock "109ab664-3bb9-420e-a4a5-526277c60b96" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.780530] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Lock "109ab664-3bb9-420e-a4a5-526277c60b96" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.780787] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Acquiring lock "109ab664-3bb9-420e-a4a5-526277c60b96-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.780982] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Lock "109ab664-3bb9-420e-a4a5-526277c60b96-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.781191] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Lock "109ab664-3bb9-420e-a4a5-526277c60b96-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.793177] env[61629]: INFO nova.compute.manager [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Terminating instance [ 897.795256] env[61629]: DEBUG nova.compute.manager [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 897.795460] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 897.797122] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7bca765-662d-4232-b97b-c75d4da21f9a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.810585] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 897.811152] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-977b98a0-e27b-48de-857c-a1a250bd935d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.816945] env[61629]: DEBUG oslo_vmware.api [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Waiting for the task: (returnval){ [ 897.816945] env[61629]: value = "task-1354249" [ 897.816945] env[61629]: _type = "Task" [ 897.816945] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.829997] env[61629]: DEBUG oslo_vmware.api [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Task: {'id': task-1354249, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.831494] env[61629]: DEBUG nova.network.neutron [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Successfully created port: e28dd480-831a-49f0-804e-ad88763d3c24 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 897.851348] env[61629]: DEBUG oslo_concurrency.lockutils [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquiring lock "refresh_cache-274e3437-eacd-4299-9c27-97bbb0ebf1c1" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.851633] env[61629]: DEBUG oslo_concurrency.lockutils [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquired lock "refresh_cache-274e3437-eacd-4299-9c27-97bbb0ebf1c1" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.852022] env[61629]: DEBUG nova.network.neutron [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 897.876156] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354247, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.880696] env[61629]: DEBUG oslo_vmware.api [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354244, 'name': CreateSnapshot_Task, 'duration_secs': 1.422401} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.881300] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Created Snapshot of the VM instance {{(pid=61629) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 897.882364] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-807f23cd-0bfb-4ec0-a6b0-d157f5d7c8a3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.917094] env[61629]: DEBUG nova.network.neutron [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 898.032394] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f19bb8-1d47-4755-ab9d-61d191f397ff {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.045161] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf3f6642-9603-440c-8849-893614cf0c54 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.084559] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6088b0-45d1-477e-b028-b4d1d373e5e2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.093844] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a9dcf3-c455-43ef-88a1-9086aff903d0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.110408] env[61629]: DEBUG nova.compute.provider_tree [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 898.143832] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354248, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.226214] env[61629]: DEBUG nova.network.neutron [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Updating instance_info_cache with network_info: [{"id": "5bb08edd-3639-401f-9e54-26abd98b246e", "address": "fa:16:3e:19:1e:55", "network": {"id": "a022be65-b398-460a-a741-9190e3d7e38d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1776770394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a35cec60cf464a1c9f8215dbc6403a84", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bb08edd-36", "ovs_interfaceid": "5bb08edd-3639-401f-9e54-26abd98b246e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.330723] env[61629]: DEBUG oslo_vmware.api [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Task: {'id': task-1354249, 'name': PowerOffVM_Task, 'duration_secs': 0.254275} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.331037] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 898.331600] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 898.331600] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7617cec-6cdd-45b9-8905-b8e19aae07e2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.362833] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354247, 'name': ReconfigVM_Task, 'duration_secs': 0.746009} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.363836] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Reconfigured VM instance instance-0000004a to attach disk [datastore2] 87a1383f-d66b-4bde-b153-89ac62ff8390/7f036972-f3d8-47df-ae86-f8f2844bf80c-rescue.vmdk or device None with type thin {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 898.364951] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174273ef-4eca-497e-b252-db59b1733543 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.390313] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-636253a1-ca57-4e90-9508-ce441a5045ac {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.401867] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 898.402111] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 898.402300] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Deleting the datastore file [datastore1] 109ab664-3bb9-420e-a4a5-526277c60b96 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 898.409768] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Creating linked-clone VM from snapshot {{(pid=61629) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 898.410111] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c30bf14f-44c2-4b6e-aa50-d7c6b37987fe {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.413444] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a0aadd7f-0081-4885-a8f2-14a4d2e7ffb5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.418646] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 898.418646] env[61629]: value = "task-1354251" [ 898.418646] env[61629]: _type = "Task" [ 898.418646] env[61629]: } 
to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.425492] env[61629]: DEBUG oslo_vmware.api [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Waiting for the task: (returnval){ [ 898.425492] env[61629]: value = "task-1354253" [ 898.425492] env[61629]: _type = "Task" [ 898.425492] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.425492] env[61629]: DEBUG oslo_vmware.api [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Waiting for the task: (returnval){ [ 898.425492] env[61629]: value = "task-1354252" [ 898.425492] env[61629]: _type = "Task" [ 898.425492] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.435126] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354251, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.441336] env[61629]: DEBUG oslo_vmware.api [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Task: {'id': task-1354252, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.444709] env[61629]: DEBUG oslo_vmware.api [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354253, 'name': CloneVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.614398] env[61629]: DEBUG nova.scheduler.client.report [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 898.645209] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354248, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.676359] env[61629]: DEBUG nova.compute.manager [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Start spawning the instance on the hypervisor. 
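[editor's note] The records above show the driver's task-wait pattern: it logs "Waiting for the task ... to complete" once, then polls the task and logs "progress is N%" until the task reports a terminal state. The stdlib-only Python below is a minimal sketch of that poll loop; TaskStub, the message format, and the 0.5 s interval are illustrative stand-ins, not the oslo.vmware implementation.

import time
import logging

logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
LOG = logging.getLogger("task-poll-sketch")


class TaskStub:
    """Fake task handle that advances its reported progress on every poll."""

    def __init__(self, task_id, name):
        self.task_id = task_id
        self.name = name
        self._progress = 0

    def poll(self):
        """Return (state, progress); 'success' once progress reaches 100."""
        self._progress = min(self._progress + 33, 100)
        state = "success" if self._progress >= 100 else "running"
        return state, self._progress


def wait_for_task(task, interval=0.5):
    """Poll the task until it completes, logging progress like the records above."""
    LOG.debug("Waiting for the task: %s (%s) to complete.", task.task_id, task.name)
    while True:
        state, progress = task.poll()
        LOG.debug("Task: {'id': %s, 'name': %s} progress is %d%%.",
                  task.task_id, task.name, progress)
        if state == "success":
            LOG.debug("Task %s completed successfully.", task.task_id)
            return
        if state == "error":
            raise RuntimeError("task %s failed" % task.task_id)
        time.sleep(interval)


if __name__ == "__main__":
    wait_for_task(TaskStub("task-1354253", "CloneVM_Task"))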
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 898.703278] env[61629]: DEBUG nova.virt.hardware [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 898.703557] env[61629]: DEBUG nova.virt.hardware [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 898.703718] env[61629]: DEBUG nova.virt.hardware [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 898.703903] env[61629]: DEBUG nova.virt.hardware [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 898.704067] env[61629]: DEBUG nova.virt.hardware [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 898.704228] env[61629]: DEBUG nova.virt.hardware [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 898.704448] env[61629]: DEBUG nova.virt.hardware [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 898.704605] env[61629]: DEBUG nova.virt.hardware [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 898.704885] 
env[61629]: DEBUG nova.virt.hardware [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 898.705103] env[61629]: DEBUG nova.virt.hardware [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 898.705284] env[61629]: DEBUG nova.virt.hardware [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 898.706572] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b06e4377-7ad1-44e8-b833-33df0a359226 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.715568] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37511465-e42d-4acd-bd7f-79a5f1af4ad2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.730627] env[61629]: DEBUG oslo_concurrency.lockutils [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Releasing lock "refresh_cache-274e3437-eacd-4299-9c27-97bbb0ebf1c1" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.730966] env[61629]: DEBUG nova.compute.manager [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Instance network_info: |[{"id": "5bb08edd-3639-401f-9e54-26abd98b246e", "address": "fa:16:3e:19:1e:55", "network": {"id": "a022be65-b398-460a-a741-9190e3d7e38d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1776770394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a35cec60cf464a1c9f8215dbc6403a84", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bb08edd-36", "ovs_interfaceid": "5bb08edd-3639-401f-9e54-26abd98b246e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 898.731639] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None 
req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:1e:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24144f5a-050a-4f1e-8d8c-774dc16dc791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5bb08edd-3639-401f-9e54-26abd98b246e', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 898.739443] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Creating folder: Project (a35cec60cf464a1c9f8215dbc6403a84). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 898.739759] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3492277-83e5-4e02-ba4f-889ea9e07783 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.751430] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Created folder: Project (a35cec60cf464a1c9f8215dbc6403a84) in parent group-v288443. [ 898.752033] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Creating folder: Instances. Parent ref: group-v288524. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 898.752132] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e80d1b2-b056-4647-a366-e1eb4fae66eb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.764658] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Created folder: Instances in parent group-v288524. [ 898.765034] env[61629]: DEBUG oslo.service.loopingcall [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 898.765308] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 898.765578] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b45c29c-925a-4af3-bf9c-ca8dfc36cea6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.789489] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 898.789489] env[61629]: value = "task-1354256" [ 898.789489] env[61629]: _type = "Task" [ 898.789489] env[61629]: } to complete. 
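[editor's note] The "Instance VIF info" record above is the flattened form of one entry from the instance's network_info: the port id, MAC address, and NSX logical-switch id are pulled out and paired with an OpaqueNetwork reference and the vmxnet3 model. The sketch below reproduces that mapping from the values visible in the log; the function name and the hard-coded 'vmxnet3' default are assumptions for illustration, not the driver's actual code.

def vif_info_from_network_info(vif, vif_model="vmxnet3"):
    """Flatten a neutron-style VIF dict into the VIF-info shape shown in the log."""
    details = vif.get("details", {})
    return {
        "network_name": vif["network"]["bridge"],             # e.g. 'br-int'
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details.get("nsx-logical-switch-id"),
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": vif_model,
    }


if __name__ == "__main__":
    # Sample values taken from the network_info record above.
    sample_vif = {
        "id": "5bb08edd-3639-401f-9e54-26abd98b246e",
        "address": "fa:16:3e:19:1e:55",
        "network": {"bridge": "br-int"},
        "details": {"nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791"},
    }
    print(vif_info_from_network_info(sample_vif))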
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.799284] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354256, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.931154] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354251, 'name': ReconfigVM_Task, 'duration_secs': 0.369268} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.939190] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 898.939700] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f90c525e-390e-44e5-8a3e-bd03883f1344 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.947352] env[61629]: DEBUG oslo_vmware.api [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354253, 'name': CloneVM_Task} progress is 94%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.951838] env[61629]: DEBUG oslo_vmware.api [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Task: {'id': task-1354252, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.230627} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.952178] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 898.952178] env[61629]: value = "task-1354257" [ 898.952178] env[61629]: _type = "Task" [ 898.952178] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.952396] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 898.952600] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 898.952807] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 898.953571] env[61629]: INFO nova.compute.manager [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Took 1.16 seconds to destroy the instance on the hypervisor. [ 898.953571] env[61629]: DEBUG oslo.service.loopingcall [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 898.953571] env[61629]: DEBUG nova.compute.manager [-] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 898.953784] env[61629]: DEBUG nova.network.neutron [-] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 898.964694] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354257, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.120225] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.471s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.120957] env[61629]: DEBUG nova.compute.manager [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Start building networks asynchronously for instance. 
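[editor's note] The 'acquired ... waited 17.471s' and '"released" ... held 2.471s' records come from a lock wrapper that timestamps how long a caller waited for the lock and how long it held it. Below is a small stdlib-only sketch of that pattern; TimedLock and its exact message format are approximations of what the log shows, not oslo.concurrency's lockutils.

import threading
import time
import logging

logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
LOG = logging.getLogger("timed-lock-sketch")


class TimedLock:
    """Wrap a threading.Lock and log how long callers waited for it and held it."""

    def __init__(self, name):
        self.name = name
        self._lock = threading.Lock()

    def __call__(self, caller):
        return _Guard(self, caller)


class _Guard:
    def __init__(self, owner, caller):
        self._owner = owner
        self._caller = caller

    def __enter__(self):
        start = time.monotonic()
        LOG.debug('Acquiring lock "%s" by "%s"', self._owner.name, self._caller)
        self._owner._lock.acquire()
        self._acquired_at = time.monotonic()
        LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                  self._owner.name, self._caller, self._acquired_at - start)
        return self

    def __exit__(self, *exc):
        held = time.monotonic() - self._acquired_at
        self._owner._lock.release()
        LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                  self._owner.name, self._caller, held)
        return False


if __name__ == "__main__":
    compute_resources = TimedLock("compute_resources")
    with compute_resources("ResourceTracker.instance_claim"):
        time.sleep(0.2)  # simulate claim work while holding the lock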
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 899.125413] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.471s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.126917] env[61629]: INFO nova.compute.claims [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 899.130711] env[61629]: DEBUG nova.compute.manager [req-f40a84a5-b841-4362-987c-ab7d6531432d req-c83fa34a-a5a7-4141-920c-68280d975aab service nova] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Received event network-changed-5bb08edd-3639-401f-9e54-26abd98b246e {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 899.130905] env[61629]: DEBUG nova.compute.manager [req-f40a84a5-b841-4362-987c-ab7d6531432d req-c83fa34a-a5a7-4141-920c-68280d975aab service nova] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Refreshing instance network info cache due to event network-changed-5bb08edd-3639-401f-9e54-26abd98b246e. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 899.131207] env[61629]: DEBUG oslo_concurrency.lockutils [req-f40a84a5-b841-4362-987c-ab7d6531432d req-c83fa34a-a5a7-4141-920c-68280d975aab service nova] Acquiring lock "refresh_cache-274e3437-eacd-4299-9c27-97bbb0ebf1c1" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.131400] env[61629]: DEBUG oslo_concurrency.lockutils [req-f40a84a5-b841-4362-987c-ab7d6531432d req-c83fa34a-a5a7-4141-920c-68280d975aab service nova] Acquired lock "refresh_cache-274e3437-eacd-4299-9c27-97bbb0ebf1c1" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.131616] env[61629]: DEBUG nova.network.neutron [req-f40a84a5-b841-4362-987c-ab7d6531432d req-c83fa34a-a5a7-4141-920c-68280d975aab service nova] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Refreshing network info cache for port 5bb08edd-3639-401f-9e54-26abd98b246e {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 899.149324] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354248, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.299375] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354256, 'name': CreateVM_Task, 'duration_secs': 0.390982} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.299594] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 899.300269] env[61629]: DEBUG oslo_concurrency.lockutils [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.300444] env[61629]: DEBUG oslo_concurrency.lockutils [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.300779] env[61629]: DEBUG oslo_concurrency.lockutils [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 899.301050] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d0de1bc-4868-42b6-90e6-586847a9556b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.306155] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Waiting for the task: (returnval){ [ 899.306155] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]526f0d44-c68a-2683-48ce-41f41af5a225" [ 899.306155] env[61629]: _type = "Task" [ 899.306155] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.314478] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]526f0d44-c68a-2683-48ce-41f41af5a225, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.440709] env[61629]: DEBUG oslo_vmware.api [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354253, 'name': CloneVM_Task} progress is 100%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.463227] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354257, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.637784] env[61629]: DEBUG nova.compute.utils [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 899.646025] env[61629]: DEBUG nova.compute.manager [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 899.646025] env[61629]: DEBUG nova.network.neutron [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 899.652442] env[61629]: DEBUG nova.network.neutron [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Successfully updated port: e28dd480-831a-49f0-804e-ad88763d3c24 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 899.665321] env[61629]: DEBUG oslo_vmware.api [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354248, 'name': PowerOnVM_Task, 'duration_secs': 1.742485} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.665844] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 899.666183] env[61629]: INFO nova.compute.manager [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Took 10.05 seconds to spawn the instance on the hypervisor. 
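[editor's note] Once PowerOnVM_Task completes, the manager reports how long the whole spawn took and immediately re-reads the VM's power state ("Checking state"). The following is a minimal sketch of that sequence, with stub callables standing in for the actual vCenter calls; the state constants and function names are illustrative.

import time
import logging

logging.basicConfig(level=logging.INFO, format="%(levelname)s %(message)s")
LOG = logging.getLogger("spawn-timing-sketch")

POWER_STATE_RUNNING = 1
POWER_STATE_SHUTDOWN = 4


def spawn_and_verify(instance_uuid, power_on_vm, get_vm_power_state):
    """Time the spawn, then read back the hypervisor power state."""
    start = time.monotonic()
    power_on_vm(instance_uuid)                     # issue PowerOnVM and wait for the task
    LOG.info("[instance: %s] Took %.2f seconds to spawn the instance on the hypervisor.",
             instance_uuid, time.monotonic() - start)
    LOG.info("[instance: %s] Checking state", instance_uuid)
    state = get_vm_power_state(instance_uuid)      # e.g. a property read of runtime.powerState
    return POWER_STATE_RUNNING if state == "poweredOn" else POWER_STATE_SHUTDOWN


if __name__ == "__main__":
    # Stub callables standing in for driver calls; real code would talk to vCenter.
    result = spawn_and_verify(
        "a42d5132-22e5-4551-80d2-fb7a55a7fa9e",
        power_on_vm=lambda uuid: time.sleep(0.1),
        get_vm_power_state=lambda uuid: "poweredOn",
    )
    print("power state:", result)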
[ 899.666531] env[61629]: DEBUG nova.compute.manager [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 899.667678] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00658fdc-333e-4254-97f7-363deb53cdf0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.738708] env[61629]: DEBUG nova.policy [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38cc8b6343d54d30a3f6f13512d23020', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e7fced3a50d4821b42cf087d8111cb7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 899.785018] env[61629]: DEBUG nova.network.neutron [-] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.816973] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]526f0d44-c68a-2683-48ce-41f41af5a225, 'name': SearchDatastore_Task, 'duration_secs': 0.014884} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.817360] env[61629]: DEBUG oslo_concurrency.lockutils [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.817601] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 899.817839] env[61629]: DEBUG oslo_concurrency.lockutils [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.818018] env[61629]: DEBUG oslo_concurrency.lockutils [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.818195] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 899.818478] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-26ae1ac0-fb60-4b04-9a8d-9a359839fbdf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.833231] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 899.833231] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 899.833231] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e95e07e3-633b-4079-90f8-46c0384a51b4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.837049] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Waiting for the task: (returnval){ [ 899.837049] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5264acc7-39ba-8f1a-3c45-5d0d7537610f" [ 899.837049] env[61629]: _type = "Task" [ 899.837049] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.847105] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5264acc7-39ba-8f1a-3c45-5d0d7537610f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.935532] env[61629]: DEBUG nova.network.neutron [req-f40a84a5-b841-4362-987c-ab7d6531432d req-c83fa34a-a5a7-4141-920c-68280d975aab service nova] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Updated VIF entry in instance network info cache for port 5bb08edd-3639-401f-9e54-26abd98b246e. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 899.935895] env[61629]: DEBUG nova.network.neutron [req-f40a84a5-b841-4362-987c-ab7d6531432d req-c83fa34a-a5a7-4141-920c-68280d975aab service nova] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Updating instance_info_cache with network_info: [{"id": "5bb08edd-3639-401f-9e54-26abd98b246e", "address": "fa:16:3e:19:1e:55", "network": {"id": "a022be65-b398-460a-a741-9190e3d7e38d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1776770394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a35cec60cf464a1c9f8215dbc6403a84", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bb08edd-36", "ovs_interfaceid": "5bb08edd-3639-401f-9e54-26abd98b246e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.943790] env[61629]: DEBUG oslo_vmware.api [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354253, 'name': CloneVM_Task, 'duration_secs': 1.09897} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.944088] env[61629]: INFO nova.virt.vmwareapi.vmops [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Created linked-clone VM from snapshot [ 899.944911] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bfa13ce-83cb-4d73-b40e-c5db0ae9d582 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.955440] env[61629]: DEBUG nova.virt.vmwareapi.images [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Uploading image ac4d28b3-1192-4538-9235-fadc4691afd7 {{(pid=61629) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 899.968393] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354257, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.973148] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Destroying the VM {{(pid=61629) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 899.973423] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c438402a-3e4f-4c29-b3a6-8bb050bac87c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.980556] env[61629]: DEBUG oslo_vmware.api [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Waiting for the task: (returnval){ [ 899.980556] env[61629]: value = "task-1354258" [ 899.980556] env[61629]: _type = "Task" [ 899.980556] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.989392] env[61629]: DEBUG oslo_vmware.api [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354258, 'name': Destroy_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.091504] env[61629]: DEBUG nova.network.neutron [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Successfully created port: a193ab2f-5a9d-4411-94f9-cc5834b60795 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 900.155897] env[61629]: DEBUG nova.compute.manager [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Start building block device mappings for instance. 
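[editor's note] The ImagesTestJSON records above trace the snapshot path: a linked-clone VM is created from the instance's snapshot, that clone is uploaded as image ac4d28b3-1192-4538-9235-fadc4691afd7, and the temporary clone is then destroyed. The sketch below shows that sequence with hypothetical helper callables; the strict upload-then-destroy ordering is a simplification assumed for illustration.

import logging

logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
LOG = logging.getLogger("snapshot-upload-sketch")


def snapshot_instance(instance_uuid, image_id, create_linked_clone, upload_vm, destroy_vm):
    """Clone from snapshot, upload the clone, then clean up the temporary VM."""
    LOG.debug("[instance: %s] Creating linked-clone VM from snapshot", instance_uuid)
    clone_ref = create_linked_clone(instance_uuid)

    try:
        LOG.debug("[instance: %s] Uploading image %s", instance_uuid, image_id)
        upload_vm(clone_ref, image_id)       # stream-optimized export to the image service
    finally:
        # The clone only exists to be exported, so it is destroyed either way.
        LOG.debug("[instance: %s] Destroying the VM", instance_uuid)
        destroy_vm(clone_ref)


if __name__ == "__main__":
    snapshot_instance(
        "1d451558-dbbc-4942-b739-5d4b88057a75",
        "ac4d28b3-1192-4538-9235-fadc4691afd7",
        create_linked_clone=lambda uuid: "vm-clone-ref",
        upload_vm=lambda ref, image: None,
        destroy_vm=lambda ref: None,
    )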
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 900.162021] env[61629]: DEBUG oslo_concurrency.lockutils [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "refresh_cache-7cf87381-235e-449b-8269-61c2d4033028" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.162021] env[61629]: DEBUG oslo_concurrency.lockutils [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquired lock "refresh_cache-7cf87381-235e-449b-8269-61c2d4033028" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.162021] env[61629]: DEBUG nova.network.neutron [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 900.192138] env[61629]: INFO nova.compute.manager [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Took 30.93 seconds to build instance. [ 900.290283] env[61629]: INFO nova.compute.manager [-] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Took 1.34 seconds to deallocate network for instance. [ 900.348183] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5264acc7-39ba-8f1a-3c45-5d0d7537610f, 'name': SearchDatastore_Task, 'duration_secs': 0.016663} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.351899] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edc9f606-e3c0-487c-b58d-da1cf0c784af {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.358235] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Waiting for the task: (returnval){ [ 900.358235] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52813ba9-0fa7-b423-b74b-10bef4b15b13" [ 900.358235] env[61629]: _type = "Task" [ 900.358235] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.368515] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52813ba9-0fa7-b423-b74b-10bef4b15b13, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.438470] env[61629]: DEBUG oslo_concurrency.lockutils [req-f40a84a5-b841-4362-987c-ab7d6531432d req-c83fa34a-a5a7-4141-920c-68280d975aab service nova] Releasing lock "refresh_cache-274e3437-eacd-4299-9c27-97bbb0ebf1c1" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.465918] env[61629]: DEBUG oslo_vmware.api [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354257, 'name': PowerOnVM_Task, 'duration_secs': 1.461181} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.473856] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 900.479158] env[61629]: DEBUG nova.compute.manager [None req-4bf3198c-9581-4bab-b799-9b8963360fd8 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 900.479158] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a136e0b1-61c3-4028-a3cb-b783c38d6226 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.505023] env[61629]: DEBUG oslo_vmware.api [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354258, 'name': Destroy_Task} progress is 33%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.506828] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc32103-5da7-4f71-b3b1-5d9baa6e7d7c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.514636] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-490e6bff-2976-43a0-b9fb-bb52db99eb6b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.552944] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9ec9be-5de8-413f-9c82-793a8635a05c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.562742] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0da56c6-3412-424e-ad9f-5629c615d222 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.583252] env[61629]: DEBUG nova.compute.provider_tree [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.698407] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cb21fb-1b75-4352-8ee2-44aac0f314a2 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "a42d5132-22e5-4551-80d2-fb7a55a7fa9e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.606s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.700878] env[61629]: DEBUG nova.network.neutron [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 900.801566] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.877224] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52813ba9-0fa7-b423-b74b-10bef4b15b13, 'name': SearchDatastore_Task, 'duration_secs': 0.017075} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.878084] env[61629]: DEBUG oslo_concurrency.lockutils [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.878084] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 274e3437-eacd-4299-9c27-97bbb0ebf1c1/274e3437-eacd-4299-9c27-97bbb0ebf1c1.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 900.878311] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-310a5c57-0f77-4c6c-a9db-6cb828399d9a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.889196] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Waiting for the task: (returnval){ [ 900.889196] env[61629]: value = "task-1354259" [ 900.889196] env[61629]: _type = "Task" [ 900.889196] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.901278] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354259, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.946434] env[61629]: DEBUG nova.network.neutron [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Updating instance_info_cache with network_info: [{"id": "e28dd480-831a-49f0-804e-ad88763d3c24", "address": "fa:16:3e:9f:0b:da", "network": {"id": "534e08bb-ebea-429f-8a3d-733c418ea99b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1143213928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6d1f876ee054beb89ca0eb0776ddcd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape28dd480-83", "ovs_interfaceid": "e28dd480-831a-49f0-804e-ad88763d3c24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.997686] env[61629]: DEBUG oslo_vmware.api [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354258, 'name': Destroy_Task} progress is 100%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.088020] env[61629]: DEBUG nova.scheduler.client.report [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 901.169082] env[61629]: DEBUG nova.compute.manager [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Start spawning the instance on the hypervisor. 
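[editor's note] The CopyVirtualDisk_Task above is the last step of the image-cache flow visible in this run: the driver first checks (via SearchDatastore_Task) whether image 7f036972-f3d8-47df-ae86-f8f2844bf80c already sits under devstack-image-cache_base, fetches it only if missing, and then copies the cached vmdk into the instance's own datastore directory. A simplified sketch of that flow follows, with stub callables in place of the datastore and image-service calls; the helper names are assumptions, not the driver's API.

import logging

logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
LOG = logging.getLogger("image-cache-sketch")


def prepare_root_disk(image_id, instance_uuid, datastore, cache_has_image,
                      fetch_image_to_cache, copy_virtual_disk):
    """Ensure the image is in the per-datastore cache, then copy it for the instance."""
    cache_path = "[%s] devstack-image-cache_base/%s/%s.vmdk" % (datastore, image_id, image_id)
    instance_path = "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

    LOG.debug("[instance: %s] Processing image %s", instance_uuid, image_id)
    if not cache_has_image(cache_path):                  # SearchDatastore-style existence check
        LOG.debug("Image %s not found in cache, fetching it", image_id)
        fetch_image_to_cache(image_id, cache_path)       # download from the image service once

    LOG.debug("Copying Virtual Disk %s to %s", cache_path, instance_path)
    copy_virtual_disk(cache_path, instance_path)         # per-instance copy of the cached vmdk
    return instance_path


if __name__ == "__main__":
    prepare_root_disk(
        "7f036972-f3d8-47df-ae86-f8f2844bf80c",
        "274e3437-eacd-4299-9c27-97bbb0ebf1c1",
        "datastore1",
        cache_has_image=lambda path: True,
        fetch_image_to_cache=lambda image, path: None,
        copy_virtual_disk=lambda src, dst: None,
    )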
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 901.198629] env[61629]: DEBUG nova.virt.hardware [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 901.198896] env[61629]: DEBUG nova.virt.hardware [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 901.199070] env[61629]: DEBUG nova.virt.hardware [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 901.199257] env[61629]: DEBUG nova.virt.hardware [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 901.199505] env[61629]: DEBUG nova.virt.hardware [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 901.199589] env[61629]: DEBUG nova.virt.hardware [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 901.199757] env[61629]: DEBUG nova.virt.hardware [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 901.199917] env[61629]: DEBUG nova.virt.hardware [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 901.200104] env[61629]: DEBUG 
nova.virt.hardware [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 901.200276] env[61629]: DEBUG nova.virt.hardware [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 901.200452] env[61629]: DEBUG nova.virt.hardware [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 901.201786] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9295db-e30a-4e83-8d5e-75ccc8ea2402 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.210361] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fe05d68-1be6-4199-a0a6-a802ace36181 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.217040] env[61629]: DEBUG nova.compute.manager [req-1bafa1c1-4f3a-4de0-bbe2-f6d0dd67bdd7 req-6e59241b-5a5a-44f6-bfbb-4deebdce9d2d service nova] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Received event network-vif-deleted-155236cd-5bf3-4503-8968-010a3af74156 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 901.217250] env[61629]: DEBUG nova.compute.manager [req-1bafa1c1-4f3a-4de0-bbe2-f6d0dd67bdd7 req-6e59241b-5a5a-44f6-bfbb-4deebdce9d2d service nova] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Received event network-vif-plugged-e28dd480-831a-49f0-804e-ad88763d3c24 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 901.217456] env[61629]: DEBUG oslo_concurrency.lockutils [req-1bafa1c1-4f3a-4de0-bbe2-f6d0dd67bdd7 req-6e59241b-5a5a-44f6-bfbb-4deebdce9d2d service nova] Acquiring lock "7cf87381-235e-449b-8269-61c2d4033028-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.217666] env[61629]: DEBUG oslo_concurrency.lockutils [req-1bafa1c1-4f3a-4de0-bbe2-f6d0dd67bdd7 req-6e59241b-5a5a-44f6-bfbb-4deebdce9d2d service nova] Lock "7cf87381-235e-449b-8269-61c2d4033028-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.217839] env[61629]: DEBUG oslo_concurrency.lockutils [req-1bafa1c1-4f3a-4de0-bbe2-f6d0dd67bdd7 req-6e59241b-5a5a-44f6-bfbb-4deebdce9d2d service nova] Lock "7cf87381-235e-449b-8269-61c2d4033028-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.218023] env[61629]: DEBUG nova.compute.manager 
[req-1bafa1c1-4f3a-4de0-bbe2-f6d0dd67bdd7 req-6e59241b-5a5a-44f6-bfbb-4deebdce9d2d service nova] [instance: 7cf87381-235e-449b-8269-61c2d4033028] No waiting events found dispatching network-vif-plugged-e28dd480-831a-49f0-804e-ad88763d3c24 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 901.218193] env[61629]: WARNING nova.compute.manager [req-1bafa1c1-4f3a-4de0-bbe2-f6d0dd67bdd7 req-6e59241b-5a5a-44f6-bfbb-4deebdce9d2d service nova] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Received unexpected event network-vif-plugged-e28dd480-831a-49f0-804e-ad88763d3c24 for instance with vm_state building and task_state spawning. [ 901.218359] env[61629]: DEBUG nova.compute.manager [req-1bafa1c1-4f3a-4de0-bbe2-f6d0dd67bdd7 req-6e59241b-5a5a-44f6-bfbb-4deebdce9d2d service nova] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Received event network-changed-e28dd480-831a-49f0-804e-ad88763d3c24 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 901.218518] env[61629]: DEBUG nova.compute.manager [req-1bafa1c1-4f3a-4de0-bbe2-f6d0dd67bdd7 req-6e59241b-5a5a-44f6-bfbb-4deebdce9d2d service nova] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Refreshing instance network info cache due to event network-changed-e28dd480-831a-49f0-804e-ad88763d3c24. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 901.218688] env[61629]: DEBUG oslo_concurrency.lockutils [req-1bafa1c1-4f3a-4de0-bbe2-f6d0dd67bdd7 req-6e59241b-5a5a-44f6-bfbb-4deebdce9d2d service nova] Acquiring lock "refresh_cache-7cf87381-235e-449b-8269-61c2d4033028" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.402075] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354259, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.448823] env[61629]: DEBUG oslo_concurrency.lockutils [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Releasing lock "refresh_cache-7cf87381-235e-449b-8269-61c2d4033028" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.449201] env[61629]: DEBUG nova.compute.manager [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Instance network_info: |[{"id": "e28dd480-831a-49f0-804e-ad88763d3c24", "address": "fa:16:3e:9f:0b:da", "network": {"id": "534e08bb-ebea-429f-8a3d-733c418ea99b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1143213928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6d1f876ee054beb89ca0eb0776ddcd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape28dd480-83", "ovs_interfaceid": "e28dd480-831a-49f0-804e-ad88763d3c24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 901.449621] env[61629]: DEBUG oslo_concurrency.lockutils [req-1bafa1c1-4f3a-4de0-bbe2-f6d0dd67bdd7 req-6e59241b-5a5a-44f6-bfbb-4deebdce9d2d service nova] Acquired lock "refresh_cache-7cf87381-235e-449b-8269-61c2d4033028" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.449909] env[61629]: DEBUG nova.network.neutron [req-1bafa1c1-4f3a-4de0-bbe2-f6d0dd67bdd7 req-6e59241b-5a5a-44f6-bfbb-4deebdce9d2d service nova] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Refreshing network info cache for port e28dd480-831a-49f0-804e-ad88763d3c24 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 901.451620] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:0b:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98f447de-d71e-41ef-bc37-ed97b4a1f58f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e28dd480-831a-49f0-804e-ad88763d3c24', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 901.461028] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 
tempest-ServerDiskConfigTestJSON-1122083488-project-member] Creating folder: Project (c6d1f876ee054beb89ca0eb0776ddcd5). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 901.462207] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-04eeba63-5498-4c6c-90ab-1963fbd40ca5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.473691] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Created folder: Project (c6d1f876ee054beb89ca0eb0776ddcd5) in parent group-v288443. [ 901.473912] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Creating folder: Instances. Parent ref: group-v288527. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 901.474256] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-346570ad-517a-46bd-bc68-3781bba49919 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.486414] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Created folder: Instances in parent group-v288527. [ 901.486414] env[61629]: DEBUG oslo.service.loopingcall [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 901.486414] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 901.486670] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4dfe659-7927-41b9-9138-4fc4b5187e49 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.511905] env[61629]: DEBUG oslo_vmware.api [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354258, 'name': Destroy_Task} progress is 100%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.513735] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 901.513735] env[61629]: value = "task-1354262" [ 901.513735] env[61629]: _type = "Task" [ 901.513735] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.524991] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354262, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.570440] env[61629]: INFO nova.compute.manager [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Unrescuing [ 901.570907] env[61629]: DEBUG oslo_concurrency.lockutils [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "refresh_cache-87a1383f-d66b-4bde-b153-89ac62ff8390" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.571183] env[61629]: DEBUG oslo_concurrency.lockutils [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquired lock "refresh_cache-87a1383f-d66b-4bde-b153-89ac62ff8390" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.571472] env[61629]: DEBUG nova.network.neutron [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 901.591470] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.466s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.592035] env[61629]: DEBUG nova.compute.manager [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 901.594620] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 18.756s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.649130] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4c274bdf-8ba6-4974-bb70-95f31b7f4664 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquiring lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.649379] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4c274bdf-8ba6-4974-bb70-95f31b7f4664 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.901441] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354259, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.998248] env[61629]: DEBUG oslo_vmware.api [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354258, 'name': Destroy_Task, 'duration_secs': 1.555769} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.999216] env[61629]: DEBUG nova.network.neutron [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Successfully updated port: a193ab2f-5a9d-4411-94f9-cc5834b60795 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 902.000989] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Destroyed the VM [ 902.001122] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Deleting Snapshot of the VM instance {{(pid=61629) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 902.004061] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-832e67b7-1d84-4038-9d1c-20f3cc7d681a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.014785] env[61629]: DEBUG oslo_vmware.api [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Waiting for the task: (returnval){ [ 902.014785] env[61629]: value = "task-1354263" [ 902.014785] env[61629]: _type = "Task" [ 902.014785] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.028281] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354262, 'name': CreateVM_Task} progress is 25%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.031848] env[61629]: DEBUG oslo_vmware.api [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354263, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.034240] env[61629]: DEBUG nova.compute.manager [req-fc85b232-cb41-4756-b6c9-e7454a282877 req-3593ab54-b95f-47dc-a209-4d29a1165d89 service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Received event network-vif-plugged-a193ab2f-5a9d-4411-94f9-cc5834b60795 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 902.034559] env[61629]: DEBUG oslo_concurrency.lockutils [req-fc85b232-cb41-4756-b6c9-e7454a282877 req-3593ab54-b95f-47dc-a209-4d29a1165d89 service nova] Acquiring lock "7c3e9d0f-88a8-41fe-bf61-e3db34d36928-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.034712] env[61629]: DEBUG oslo_concurrency.lockutils [req-fc85b232-cb41-4756-b6c9-e7454a282877 req-3593ab54-b95f-47dc-a209-4d29a1165d89 service nova] Lock "7c3e9d0f-88a8-41fe-bf61-e3db34d36928-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.034962] env[61629]: DEBUG oslo_concurrency.lockutils [req-fc85b232-cb41-4756-b6c9-e7454a282877 req-3593ab54-b95f-47dc-a209-4d29a1165d89 service nova] Lock "7c3e9d0f-88a8-41fe-bf61-e3db34d36928-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.035073] env[61629]: DEBUG nova.compute.manager [req-fc85b232-cb41-4756-b6c9-e7454a282877 req-3593ab54-b95f-47dc-a209-4d29a1165d89 service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] No waiting events found dispatching network-vif-plugged-a193ab2f-5a9d-4411-94f9-cc5834b60795 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 902.035220] env[61629]: WARNING nova.compute.manager [req-fc85b232-cb41-4756-b6c9-e7454a282877 req-3593ab54-b95f-47dc-a209-4d29a1165d89 service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Received unexpected event network-vif-plugged-a193ab2f-5a9d-4411-94f9-cc5834b60795 for instance with vm_state building and task_state spawning. [ 902.099200] env[61629]: DEBUG nova.compute.utils [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 902.102034] env[61629]: DEBUG nova.compute.manager [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 902.102034] env[61629]: DEBUG nova.network.neutron [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 902.152392] env[61629]: DEBUG nova.compute.utils [None req-4c274bdf-8ba6-4974-bb70-95f31b7f4664 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 902.401726] env[61629]: DEBUG nova.policy [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b122825ec88f44ed834479f30cde698c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ef41f406d18447fbee4e7b7ae52a2d2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 902.408119] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354259, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.155061} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.408634] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 274e3437-eacd-4299-9c27-97bbb0ebf1c1/274e3437-eacd-4299-9c27-97bbb0ebf1c1.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 902.408987] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 902.409296] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ddd4db61-65e9-406f-a716-c529202dc8e6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.422296] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Waiting for the task: (returnval){ [ 902.422296] env[61629]: value = "task-1354264" [ 902.422296] env[61629]: _type = "Task" [ 902.422296] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.437269] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354264, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.459369] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e651b35a-b394-4409-8d9a-a26fc4c7590a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "a42d5132-22e5-4551-80d2-fb7a55a7fa9e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.459692] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e651b35a-b394-4409-8d9a-a26fc4c7590a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "a42d5132-22e5-4551-80d2-fb7a55a7fa9e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.502375] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.502573] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquired lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.502756] env[61629]: DEBUG nova.network.neutron [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 902.509715] env[61629]: DEBUG nova.network.neutron [req-1bafa1c1-4f3a-4de0-bbe2-f6d0dd67bdd7 req-6e59241b-5a5a-44f6-bfbb-4deebdce9d2d service nova] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Updated VIF entry in instance network info cache for port e28dd480-831a-49f0-804e-ad88763d3c24. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 902.510299] env[61629]: DEBUG nova.network.neutron [req-1bafa1c1-4f3a-4de0-bbe2-f6d0dd67bdd7 req-6e59241b-5a5a-44f6-bfbb-4deebdce9d2d service nova] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Updating instance_info_cache with network_info: [{"id": "e28dd480-831a-49f0-804e-ad88763d3c24", "address": "fa:16:3e:9f:0b:da", "network": {"id": "534e08bb-ebea-429f-8a3d-733c418ea99b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1143213928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6d1f876ee054beb89ca0eb0776ddcd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape28dd480-83", "ovs_interfaceid": "e28dd480-831a-49f0-804e-ad88763d3c24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.528597] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354262, 'name': CreateVM_Task, 'duration_secs': 0.747935} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.529294] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 902.530384] env[61629]: DEBUG oslo_concurrency.lockutils [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.530570] env[61629]: DEBUG oslo_concurrency.lockutils [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.531241] env[61629]: DEBUG oslo_concurrency.lockutils [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 902.535489] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ac0b861-b6c1-4aa1-98f2-35934d86e779 {{(pid=61629) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.538627] env[61629]: DEBUG oslo_vmware.api [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354263, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.544166] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 902.544166] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]521cb513-539f-075b-4ab9-1992da40d334" [ 902.544166] env[61629]: _type = "Task" [ 902.544166] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.553444] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]521cb513-539f-075b-4ab9-1992da40d334, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.582021] env[61629]: DEBUG nova.network.neutron [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Updating instance_info_cache with network_info: [{"id": "bff06c9b-54d2-4109-b2de-70fbab2c58d4", "address": "fa:16:3e:7c:c6:f7", "network": {"id": "a1fb78c4-7c5c-4692-86e0-3111b87b44c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1355821875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87909880104e4519b42cb204f366af3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbff06c9b-54", "ovs_interfaceid": "bff06c9b-54d2-4109-b2de-70fbab2c58d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.618799] env[61629]: DEBUG nova.compute.manager [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 902.650056] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 902.650550] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 2b01eeae-64be-44b3-b4cf-c2a8490043e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 902.650740] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 09890839-b1d9-4558-992d-b1a6f4c5f750 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 902.650877] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 902.651011] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance d37958f8-7607-418b-9cfd-c3a5df721e94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 902.651223] env[61629]: WARNING nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance da1eb7f9-7562-40c8-955b-c11f831b7bc8 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 902.651358] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 87a1383f-d66b-4bde-b153-89ac62ff8390 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 902.651490] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 0d21b352-bdd0-4887-8658-cd5c448352d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 902.651850] env[61629]: WARNING nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 55f2d2fc-9404-422f-ba08-72e6e11a089f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 902.652083] env[61629]: WARNING nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 12c6b03b-8295-43de-898f-a6c35f1693b7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 902.652234] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 1d451558-dbbc-4942-b739-5d4b88057a75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 902.652402] env[61629]: WARNING nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 109ab664-3bb9-420e-a4a5-526277c60b96 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 902.652534] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance a42d5132-22e5-4551-80d2-fb7a55a7fa9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 902.652653] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 274e3437-eacd-4299-9c27-97bbb0ebf1c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 902.652771] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 7cf87381-235e-449b-8269-61c2d4033028 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 902.652995] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 7c3e9d0f-88a8-41fe-bf61-e3db34d36928 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 902.653092] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 3085a70f-360c-43a3-80d7-e7b87fb3e146 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 902.655718] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4c274bdf-8ba6-4974-bb70-95f31b7f4664 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.744168] env[61629]: DEBUG nova.network.neutron [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Successfully created port: f88f7616-a027-435a-b8a9-2a3cfffadd38 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 902.934095] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354264, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073209} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.934391] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 902.935354] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8476060-23f5-4072-9e3a-62425e9210c3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.960074] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 274e3437-eacd-4299-9c27-97bbb0ebf1c1/274e3437-eacd-4299-9c27-97bbb0ebf1c1.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 902.960405] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f5ce5e8-b092-4294-aa17-950c6588cf1e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.977132] env[61629]: DEBUG nova.compute.utils [None req-e651b35a-b394-4409-8d9a-a26fc4c7590a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 902.983656] env[61629]: DEBUG oslo_vmware.api [None 
req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Waiting for the task: (returnval){ [ 902.983656] env[61629]: value = "task-1354265" [ 902.983656] env[61629]: _type = "Task" [ 902.983656] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.993407] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354265, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.011254] env[61629]: DEBUG nova.network.neutron [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Successfully created port: 6df3f8f5-6430-44cb-ac3e-34209467a856 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 903.013210] env[61629]: DEBUG oslo_concurrency.lockutils [req-1bafa1c1-4f3a-4de0-bbe2-f6d0dd67bdd7 req-6e59241b-5a5a-44f6-bfbb-4deebdce9d2d service nova] Releasing lock "refresh_cache-7cf87381-235e-449b-8269-61c2d4033028" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.028618] env[61629]: DEBUG oslo_vmware.api [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354263, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.045895] env[61629]: DEBUG nova.network.neutron [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 903.057900] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]521cb513-539f-075b-4ab9-1992da40d334, 'name': SearchDatastore_Task, 'duration_secs': 0.016217} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.058289] env[61629]: DEBUG oslo_concurrency.lockutils [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.058606] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 903.058896] env[61629]: DEBUG oslo_concurrency.lockutils [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 903.059116] env[61629]: DEBUG oslo_concurrency.lockutils [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.059351] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 903.059643] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e6bdfaa-2637-47a2-917c-f74eebad2148 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.068728] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 903.069017] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 903.071716] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66481380-c446-46b7-8013-d380fe4e1d80 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.079851] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 903.079851] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5274b57a-71b0-4e7d-b00f-3e51d0825723" [ 903.079851] env[61629]: _type = "Task" [ 903.079851] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.084928] env[61629]: DEBUG oslo_concurrency.lockutils [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Releasing lock "refresh_cache-87a1383f-d66b-4bde-b153-89ac62ff8390" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.085466] env[61629]: DEBUG nova.objects.instance [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lazy-loading 'flavor' on Instance uuid 87a1383f-d66b-4bde-b153-89ac62ff8390 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 903.092467] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5274b57a-71b0-4e7d-b00f-3e51d0825723, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.157964] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 903.207843] env[61629]: DEBUG nova.network.neutron [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Updating instance_info_cache with network_info: [{"id": "a193ab2f-5a9d-4411-94f9-cc5834b60795", "address": "fa:16:3e:f8:02:ef", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa193ab2f-5a", "ovs_interfaceid": "a193ab2f-5a9d-4411-94f9-cc5834b60795", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.481575] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e651b35a-b394-4409-8d9a-a26fc4c7590a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "a42d5132-22e5-4551-80d2-fb7a55a7fa9e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.022s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.494585] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354265, 'name': ReconfigVM_Task, 'duration_secs': 0.381502} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.494892] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 274e3437-eacd-4299-9c27-97bbb0ebf1c1/274e3437-eacd-4299-9c27-97bbb0ebf1c1.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 903.495595] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d7d50e9b-f2e4-4650-a9a0-1abb89f0259a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.501961] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Waiting for the task: (returnval){ [ 903.501961] env[61629]: value = "task-1354266" [ 903.501961] env[61629]: _type = "Task" [ 903.501961] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.510524] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354266, 'name': Rename_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.528026] env[61629]: DEBUG oslo_vmware.api [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354263, 'name': RemoveSnapshot_Task, 'duration_secs': 1.02058} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.528341] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Deleted Snapshot of the VM instance {{(pid=61629) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 903.590295] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5274b57a-71b0-4e7d-b00f-3e51d0825723, 'name': SearchDatastore_Task, 'duration_secs': 0.019974} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.590658] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53b96b15-eaba-44f0-9d89-e5bef9428a0d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.596871] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b02f06e-805e-4acb-b595-8cbf8ff29e28 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.601010] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 903.601010] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5267a888-8e8d-2962-ef00-36bbee951b98" [ 903.601010] env[61629]: _type = "Task" [ 903.601010] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.622149] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 903.622917] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0e087265-449d-497a-bc2c-645a6d352dc1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.628301] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5267a888-8e8d-2962-ef00-36bbee951b98, 'name': SearchDatastore_Task, 'duration_secs': 0.021653} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.629065] env[61629]: DEBUG oslo_concurrency.lockutils [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.629396] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 7cf87381-235e-449b-8269-61c2d4033028/7cf87381-235e-449b-8269-61c2d4033028.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 903.629674] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-624ab009-6068-4f65-b89d-892067bb9801 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.632674] env[61629]: DEBUG nova.compute.manager [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 903.636643] env[61629]: DEBUG oslo_vmware.api [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 903.636643] env[61629]: value = "task-1354267" [ 903.636643] env[61629]: _type = "Task" [ 903.636643] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.642137] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 903.642137] env[61629]: value = "task-1354268" [ 903.642137] env[61629]: _type = "Task" [ 903.642137] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.650426] env[61629]: DEBUG oslo_vmware.api [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354267, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.657486] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354268, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.661542] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance cd165a78-21f9-4fc7-88e5-5ab35047eacc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 903.666301] env[61629]: DEBUG nova.virt.hardware [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 903.666593] env[61629]: DEBUG nova.virt.hardware [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 903.666764] env[61629]: DEBUG nova.virt.hardware [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 903.666963] env[61629]: DEBUG nova.virt.hardware [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 903.667131] env[61629]: DEBUG nova.virt.hardware [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 903.667282] env[61629]: DEBUG nova.virt.hardware [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 903.667511] env[61629]: DEBUG nova.virt.hardware [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 903.667696] env[61629]: DEBUG nova.virt.hardware [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 903.667873] env[61629]: DEBUG nova.virt.hardware [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 903.668052] env[61629]: DEBUG nova.virt.hardware [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 903.668234] env[61629]: DEBUG nova.virt.hardware [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 903.669175] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0376ccf-397c-4c71-9130-c6908ab25bb4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.677521] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fcef0a3-a1d4-44e0-ab63-5d115de27aba {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.710987] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Releasing lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.711436] env[61629]: DEBUG nova.compute.manager [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Instance network_info: |[{"id": "a193ab2f-5a9d-4411-94f9-cc5834b60795", "address": "fa:16:3e:f8:02:ef", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapa193ab2f-5a", "ovs_interfaceid": "a193ab2f-5a9d-4411-94f9-cc5834b60795", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 903.711947] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:02:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2c2daf7c-c01b-41b1-a09a-fb8b893b4c80', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a193ab2f-5a9d-4411-94f9-cc5834b60795', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 903.720380] env[61629]: DEBUG oslo.service.loopingcall [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 903.721051] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 903.721300] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a22c150-8bc7-428d-97a1-b2ebc86fb736 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.738679] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4c274bdf-8ba6-4974-bb70-95f31b7f4664 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquiring lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.738931] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4c274bdf-8ba6-4974-bb70-95f31b7f4664 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.739174] env[61629]: INFO nova.compute.manager [None req-4c274bdf-8ba6-4974-bb70-95f31b7f4664 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Attaching volume 5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66 to /dev/sdb [ 903.748018] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 903.748018] env[61629]: value = "task-1354269" [ 903.748018] env[61629]: _type = "Task" [ 903.748018] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.757063] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354269, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.775959] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5edb3804-be7c-4641-8807-efd016f0b84a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.783140] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd4cf65-56ed-4fd1-9f74-27aa1bdc7f28 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.796854] env[61629]: DEBUG nova.virt.block_device [None req-4c274bdf-8ba6-4974-bb70-95f31b7f4664 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Updating existing volume attachment record: ed631ddc-9621-4419-9db8-38290ac36c72 {{(pid=61629) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 904.012409] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354266, 'name': Rename_Task, 'duration_secs': 0.171177} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.012737] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 904.013027] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f77ea2b-eebe-4b41-b7df-46ea8a6c9da6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.022521] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Waiting for the task: (returnval){ [ 904.022521] env[61629]: value = "task-1354271" [ 904.022521] env[61629]: _type = "Task" [ 904.022521] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.034034] env[61629]: WARNING nova.compute.manager [None req-c363e6c9-b1bf-4388-b126-5ab3693d01e8 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Image not found during snapshot: nova.exception.ImageNotFound: Image ac4d28b3-1192-4538-9235-fadc4691afd7 could not be found. [ 904.036226] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354271, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.153209] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354268, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.157653] env[61629]: DEBUG oslo_vmware.api [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354267, 'name': PowerOffVM_Task, 'duration_secs': 0.319001} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.158172] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 904.165265] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Reconfiguring VM instance instance-0000004a to detach disk 2001 {{(pid=61629) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 904.167268] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance c5b6f6b8-587c-4b74-bc83-98dac319b15b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 904.167268] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=61629) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 904.167268] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3008MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=61629) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 904.169706] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4fd9a36-67d8-4138-a547-cd0944df4418 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.194022] env[61629]: DEBUG oslo_vmware.api [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 904.194022] env[61629]: value = "task-1354274" [ 904.194022] env[61629]: _type = "Task" [ 904.194022] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.200760] env[61629]: DEBUG oslo_vmware.api [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354274, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.267243] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354269, 'name': CreateVM_Task} progress is 25%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.309828] env[61629]: DEBUG nova.compute.manager [req-4cb91018-0db0-4cb1-a641-abc579dcb35b req-a3953e62-6f86-4cd3-bfec-ced411b054de service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Received event network-changed-a193ab2f-5a9d-4411-94f9-cc5834b60795 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 904.310802] env[61629]: DEBUG nova.compute.manager [req-4cb91018-0db0-4cb1-a641-abc579dcb35b req-a3953e62-6f86-4cd3-bfec-ced411b054de service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Refreshing instance network info cache due to event network-changed-a193ab2f-5a9d-4411-94f9-cc5834b60795. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 904.310802] env[61629]: DEBUG oslo_concurrency.lockutils [req-4cb91018-0db0-4cb1-a641-abc579dcb35b req-a3953e62-6f86-4cd3-bfec-ced411b054de service nova] Acquiring lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.310802] env[61629]: DEBUG oslo_concurrency.lockutils [req-4cb91018-0db0-4cb1-a641-abc579dcb35b req-a3953e62-6f86-4cd3-bfec-ced411b054de service nova] Acquired lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.310802] env[61629]: DEBUG nova.network.neutron [req-4cb91018-0db0-4cb1-a641-abc579dcb35b req-a3953e62-6f86-4cd3-bfec-ced411b054de service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Refreshing network info cache for port a193ab2f-5a9d-4411-94f9-cc5834b60795 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 904.451904] env[61629]: DEBUG nova.compute.manager [req-279f0b12-b253-47d7-8c43-9aec65677d02 req-78881a89-ad3b-42fa-90da-1cd712f8b47b service nova] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Received event network-vif-plugged-f88f7616-a027-435a-b8a9-2a3cfffadd38 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 904.452155] env[61629]: DEBUG oslo_concurrency.lockutils [req-279f0b12-b253-47d7-8c43-9aec65677d02 req-78881a89-ad3b-42fa-90da-1cd712f8b47b service nova] Acquiring lock "3085a70f-360c-43a3-80d7-e7b87fb3e146-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.452372] env[61629]: DEBUG oslo_concurrency.lockutils [req-279f0b12-b253-47d7-8c43-9aec65677d02 req-78881a89-ad3b-42fa-90da-1cd712f8b47b service nova] Lock "3085a70f-360c-43a3-80d7-e7b87fb3e146-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.452543] env[61629]: DEBUG oslo_concurrency.lockutils [req-279f0b12-b253-47d7-8c43-9aec65677d02 req-78881a89-ad3b-42fa-90da-1cd712f8b47b service nova] Lock "3085a70f-360c-43a3-80d7-e7b87fb3e146-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s 
{{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.452711] env[61629]: DEBUG nova.compute.manager [req-279f0b12-b253-47d7-8c43-9aec65677d02 req-78881a89-ad3b-42fa-90da-1cd712f8b47b service nova] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] No waiting events found dispatching network-vif-plugged-f88f7616-a027-435a-b8a9-2a3cfffadd38 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 904.452876] env[61629]: WARNING nova.compute.manager [req-279f0b12-b253-47d7-8c43-9aec65677d02 req-78881a89-ad3b-42fa-90da-1cd712f8b47b service nova] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Received unexpected event network-vif-plugged-f88f7616-a027-435a-b8a9-2a3cfffadd38 for instance with vm_state building and task_state spawning. [ 904.509032] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad7a575-50d1-4872-ae4d-e08b15f740da {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.516428] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3087d4d-f97b-4363-a4b3-12eda7940b17 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.547835] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e651b35a-b394-4409-8d9a-a26fc4c7590a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "a42d5132-22e5-4551-80d2-fb7a55a7fa9e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.548153] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e651b35a-b394-4409-8d9a-a26fc4c7590a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "a42d5132-22e5-4551-80d2-fb7a55a7fa9e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.548459] env[61629]: INFO nova.compute.manager [None req-e651b35a-b394-4409-8d9a-a26fc4c7590a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Attaching volume 038d52b8-1702-41d3-b2ef-775b800e1724 to /dev/sdb [ 904.554319] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0added14-2ec5-4887-b6ac-60802472668f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.349503] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "1d451558-dbbc-4942-b739-5d4b88057a75" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.349777] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "1d451558-dbbc-4942-b739-5d4b88057a75" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.349986] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "1d451558-dbbc-4942-b739-5d4b88057a75-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.350182] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "1d451558-dbbc-4942-b739-5d4b88057a75-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.350350] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "1d451558-dbbc-4942-b739-5d4b88057a75-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.352701] env[61629]: DEBUG nova.network.neutron [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Successfully updated port: f88f7616-a027-435a-b8a9-2a3cfffadd38 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 905.364165] env[61629]: INFO nova.compute.manager [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Terminating instance [ 905.372050] env[61629]: DEBUG oslo_vmware.api [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354271, 'name': PowerOnVM_Task, 'duration_secs': 1.232393} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.372139] env[61629]: DEBUG nova.compute.manager [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 905.372345] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 905.375671] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa0d1836-a719-4d3d-b852-883d2226d6db {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.382606] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 905.382606] env[61629]: INFO nova.compute.manager [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Took 9.24 seconds to spawn the instance on the hypervisor. [ 905.382606] env[61629]: DEBUG nova.compute.manager [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 905.382606] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dadf095e-a35c-450f-a282-6361cc60fd15 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.392669] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1dc1afd-21f5-40b2-a5cf-e7c0667d46f9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.395471] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6541bb7f-b03d-47cb-85b5-5189b6d3aa06 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.398076] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354268, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.762728} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.398298] env[61629]: DEBUG oslo_vmware.api [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354274, 'name': ReconfigVM_Task, 'duration_secs': 0.48506} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.399806] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 7cf87381-235e-449b-8269-61c2d4033028/7cf87381-235e-449b-8269-61c2d4033028.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 905.400037] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 905.400337] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Reconfigured VM instance instance-0000004a to detach disk 2001 {{(pid=61629) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 905.400518] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 905.404187] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9a8073b2-fabc-4706-8fcd-267b26ad0e53 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.405966] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b63cec8-1d7d-46a2-87af-4f5960bbe6c1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.408397] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354269, 'name': CreateVM_Task, 'duration_secs': 1.285909} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.422457] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 905.422457] env[61629]: DEBUG nova.compute.provider_tree [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 905.426312] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 905.432020] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.432020] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.432020] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 905.432020] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d1b073-9e18-40b6-a8a9-d99edb0f524a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.432397] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-121ac7f9-8d39-4e95-9dfc-392f48acffb0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.436290] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5108840d-01d3-4d39-a029-9c5018e78ee8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.438272] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] 
Waiting for the task: (returnval){ [ 905.438272] env[61629]: value = "task-1354275" [ 905.438272] env[61629]: _type = "Task" [ 905.438272] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.446341] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 905.446341] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52335be0-583b-4c40-572b-b28f8cb130d7" [ 905.446341] env[61629]: _type = "Task" [ 905.446341] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.446680] env[61629]: DEBUG oslo_vmware.api [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 905.446680] env[61629]: value = "task-1354276" [ 905.446680] env[61629]: _type = "Task" [ 905.446680] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.461922] env[61629]: DEBUG nova.virt.block_device [None req-e651b35a-b394-4409-8d9a-a26fc4c7590a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Updating existing volume attachment record: 1d9f5735-ef1e-4d73-b7b6-3c3f689eb60e {{(pid=61629) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 905.464481] env[61629]: DEBUG oslo_vmware.api [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Waiting for the task: (returnval){ [ 905.464481] env[61629]: value = "task-1354277" [ 905.464481] env[61629]: _type = "Task" [ 905.464481] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.464772] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354275, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.478193] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52335be0-583b-4c40-572b-b28f8cb130d7, 'name': SearchDatastore_Task, 'duration_secs': 0.022004} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.478742] env[61629]: DEBUG oslo_vmware.api [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354276, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.480019] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.480284] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 905.480501] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.480657] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.480887] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 905.484914] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f62e105-2012-4727-93d9-a28e83a4825d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.490109] env[61629]: DEBUG oslo_vmware.api [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354277, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.499203] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 905.499405] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 905.500186] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bcabae3d-bd87-4221-a71b-908f90fcc54e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.506075] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 905.506075] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]520e0258-cd52-dd25-922c-a06bf9051474" [ 905.506075] env[61629]: _type = "Task" [ 905.506075] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.516963] env[61629]: DEBUG oslo_vmware.rw_handles [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c0e2b5-70a5-6ed5-686a-8b4ad3c90313/disk-0.vmdk. {{(pid=61629) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 905.517992] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde2a6e2-a8e0-48bd-a4cc-53459962508d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.523304] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]520e0258-cd52-dd25-922c-a06bf9051474, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.526377] env[61629]: DEBUG oslo_vmware.rw_handles [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c0e2b5-70a5-6ed5-686a-8b4ad3c90313/disk-0.vmdk is in state: ready. {{(pid=61629) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 905.526586] env[61629]: ERROR oslo_vmware.rw_handles [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c0e2b5-70a5-6ed5-686a-8b4ad3c90313/disk-0.vmdk due to incomplete transfer. [ 905.526820] env[61629]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-7220348f-bdc3-40a3-b582-d9b2d9b38392 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.532914] env[61629]: DEBUG oslo_vmware.rw_handles [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c0e2b5-70a5-6ed5-686a-8b4ad3c90313/disk-0.vmdk. 
{{(pid=61629) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 905.533136] env[61629]: DEBUG nova.virt.vmwareapi.images [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Uploaded image 1e2172a0-89df-4cab-a61f-a1c2288e9094 to the Glance image server {{(pid=61629) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 905.535668] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Destroying the VM {{(pid=61629) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 905.537981] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-28fbbb0b-93af-4c1d-ad03-8d53684aca10 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.543380] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 905.543380] env[61629]: value = "task-1354278" [ 905.543380] env[61629]: _type = "Task" [ 905.543380] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.551548] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354278, 'name': Destroy_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.670573] env[61629]: DEBUG nova.network.neutron [req-4cb91018-0db0-4cb1-a641-abc579dcb35b req-a3953e62-6f86-4cd3-bfec-ced411b054de service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Updated VIF entry in instance network info cache for port a193ab2f-5a9d-4411-94f9-cc5834b60795. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 905.670952] env[61629]: DEBUG nova.network.neutron [req-4cb91018-0db0-4cb1-a641-abc579dcb35b req-a3953e62-6f86-4cd3-bfec-ced411b054de service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Updating instance_info_cache with network_info: [{"id": "a193ab2f-5a9d-4411-94f9-cc5834b60795", "address": "fa:16:3e:f8:02:ef", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa193ab2f-5a", "ovs_interfaceid": "a193ab2f-5a9d-4411-94f9-cc5834b60795", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.946100] env[61629]: INFO nova.compute.manager [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Took 32.08 seconds to build instance. [ 905.948088] env[61629]: ERROR nova.scheduler.client.report [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [req-84b8fd38-e4ea-4900-9a2b-ebae438429b1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID d075eff1-6f77-44a8-824e-16f3e03b4063. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-84b8fd38-e4ea-4900-9a2b-ebae438429b1"}]} [ 905.958774] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354275, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072469} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.960054] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 905.960482] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-294e9342-008a-47cd-be35-045377992aaf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.967237] env[61629]: DEBUG oslo_vmware.api [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354276, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.968652] env[61629]: DEBUG nova.scheduler.client.report [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Refreshing inventories for resource provider d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 905.991064] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 7cf87381-235e-449b-8269-61c2d4033028/7cf87381-235e-449b-8269-61c2d4033028.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 905.991951] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c16e239-6663-40cc-a40f-343471cc856c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.009543] env[61629]: DEBUG nova.scheduler.client.report [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Updating ProviderTree inventory for provider d075eff1-6f77-44a8-824e-16f3e03b4063 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 906.010145] env[61629]: DEBUG nova.compute.provider_tree [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
906.011863] env[61629]: DEBUG oslo_vmware.api [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354277, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.016865] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 906.016865] env[61629]: value = "task-1354282" [ 906.016865] env[61629]: _type = "Task" [ 906.016865] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.020225] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]520e0258-cd52-dd25-922c-a06bf9051474, 'name': SearchDatastore_Task, 'duration_secs': 0.04502} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.024221] env[61629]: DEBUG nova.scheduler.client.report [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Refreshing aggregate associations for resource provider d075eff1-6f77-44a8-824e-16f3e03b4063, aggregates: None {{(pid=61629) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 906.025943] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b90455d-a431-489d-b244-f7371fd7d1e3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.036029] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354282, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.036029] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 906.036029] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52ed01bd-b7c9-b0a1-df13-ae0bd05e7757" [ 906.036029] env[61629]: _type = "Task" [ 906.036029] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.041793] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52ed01bd-b7c9-b0a1-df13-ae0bd05e7757, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.047626] env[61629]: DEBUG nova.scheduler.client.report [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Refreshing trait associations for resource provider d075eff1-6f77-44a8-824e-16f3e03b4063, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61629) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 906.056836] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354278, 'name': Destroy_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.174557] env[61629]: DEBUG oslo_concurrency.lockutils [req-4cb91018-0db0-4cb1-a641-abc579dcb35b req-a3953e62-6f86-4cd3-bfec-ced411b054de service nova] Releasing lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.345611] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52695d5-66c7-4375-a556-e00b43892c6e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.356590] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e172d58-6589-4128-a68f-9b11aea2d1ca {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.390603] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ae41a7-dd45-48a2-a583-4ed645fcfadb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.398215] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ca57f85-dbaf-4460-8d98-e8c85cfbeb38 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.411643] env[61629]: DEBUG nova.compute.provider_tree [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 906.453605] env[61629]: DEBUG oslo_concurrency.lockutils [None req-79c4e3e5-0241-4f08-b438-907c4aa9a5c6 tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "274e3437-eacd-4299-9c27-97bbb0ebf1c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.672s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.460981] env[61629]: DEBUG oslo_vmware.api [None 
req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354276, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.476184] env[61629]: DEBUG oslo_vmware.api [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354277, 'name': PowerOffVM_Task, 'duration_secs': 0.915193} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.476582] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 906.476781] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 906.477093] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-828e6bc8-7dda-4b42-a02f-9f173d20aa71 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.499992] env[61629]: DEBUG nova.compute.manager [req-2eb94706-12ef-4001-a12e-b8be82cb848c req-9fd26a71-b34c-4613-abb1-18e9c49470bc service nova] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Received event network-changed-f88f7616-a027-435a-b8a9-2a3cfffadd38 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 906.500226] env[61629]: DEBUG nova.compute.manager [req-2eb94706-12ef-4001-a12e-b8be82cb848c req-9fd26a71-b34c-4613-abb1-18e9c49470bc service nova] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Refreshing instance network info cache due to event network-changed-f88f7616-a027-435a-b8a9-2a3cfffadd38. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 906.500565] env[61629]: DEBUG oslo_concurrency.lockutils [req-2eb94706-12ef-4001-a12e-b8be82cb848c req-9fd26a71-b34c-4613-abb1-18e9c49470bc service nova] Acquiring lock "refresh_cache-3085a70f-360c-43a3-80d7-e7b87fb3e146" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.500666] env[61629]: DEBUG oslo_concurrency.lockutils [req-2eb94706-12ef-4001-a12e-b8be82cb848c req-9fd26a71-b34c-4613-abb1-18e9c49470bc service nova] Acquired lock "refresh_cache-3085a70f-360c-43a3-80d7-e7b87fb3e146" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.500841] env[61629]: DEBUG nova.network.neutron [req-2eb94706-12ef-4001-a12e-b8be82cb848c req-9fd26a71-b34c-4613-abb1-18e9c49470bc service nova] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Refreshing network info cache for port f88f7616-a027-435a-b8a9-2a3cfffadd38 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 906.530546] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354282, 'name': ReconfigVM_Task, 'duration_secs': 0.479957} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.531036] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 7cf87381-235e-449b-8269-61c2d4033028/7cf87381-235e-449b-8269-61c2d4033028.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 906.531783] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a0f4e703-2bae-47d9-b18b-5a5890451c3d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.540741] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 906.540741] env[61629]: value = "task-1354285" [ 906.540741] env[61629]: _type = "Task" [ 906.540741] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.549908] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52ed01bd-b7c9-b0a1-df13-ae0bd05e7757, 'name': SearchDatastore_Task, 'duration_secs': 0.024607} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.553453] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.553761] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 7c3e9d0f-88a8-41fe-bf61-e3db34d36928/7c3e9d0f-88a8-41fe-bf61-e3db34d36928.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 906.554590] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3688fc72-41fa-403d-bd99-f47911bd2f0a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.559950] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354285, 'name': Rename_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.564671] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354278, 'name': Destroy_Task, 'duration_secs': 0.943259} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.566152] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Destroyed the VM [ 906.566152] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Deleting Snapshot of the VM instance {{(pid=61629) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 906.566475] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 906.566475] env[61629]: value = "task-1354286" [ 906.566475] env[61629]: _type = "Task" [ 906.566475] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.566726] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-58cea0ab-219a-4234-86d5-18733bbb87fa {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.578926] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354286, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.580277] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 906.580277] env[61629]: value = "task-1354287" [ 906.580277] env[61629]: _type = "Task" [ 906.580277] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.588927] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354287, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.627530] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 906.627777] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 906.628055] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Deleting the datastore file [datastore2] 1d451558-dbbc-4942-b739-5d4b88057a75 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 906.628340] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4d596b6d-e8cd-4fe1-97a7-fab8358f62f1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.634969] env[61629]: DEBUG oslo_vmware.api [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Waiting for the task: (returnval){ [ 906.634969] env[61629]: value = "task-1354288" [ 906.634969] env[61629]: _type = "Task" [ 906.634969] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.644097] env[61629]: DEBUG oslo_vmware.api [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354288, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.956188] env[61629]: DEBUG nova.scheduler.client.report [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Updated inventory for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with generation 94 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 906.956444] env[61629]: DEBUG nova.compute.provider_tree [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Updating resource provider d075eff1-6f77-44a8-824e-16f3e03b4063 generation from 94 to 95 during operation: update_inventory {{(pid=61629) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 906.956688] env[61629]: DEBUG nova.compute.provider_tree [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 906.972194] env[61629]: DEBUG oslo_vmware.api [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354276, 'name': PowerOnVM_Task, 'duration_secs': 1.163208} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.973071] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 906.973071] env[61629]: DEBUG nova.compute.manager [None req-902aaa7b-3c51-451f-8f9d-084e5e50831d tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 906.975171] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ca220a-04ae-4342-8eba-74418848ee62 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.061516] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354285, 'name': Rename_Task, 'duration_secs': 0.147873} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.062096] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 907.066561] env[61629]: DEBUG nova.network.neutron [req-2eb94706-12ef-4001-a12e-b8be82cb848c req-9fd26a71-b34c-4613-abb1-18e9c49470bc service nova] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 907.071020] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d3b7e06-7648-4d47-bc4a-c6495f5f82a0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.082660] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354286, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.088028] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 907.088028] env[61629]: value = "task-1354289" [ 907.088028] env[61629]: _type = "Task" [ 907.088028] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.096072] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354287, 'name': RemoveSnapshot_Task, 'duration_secs': 0.448233} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.097019] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Deleted Snapshot of the VM instance {{(pid=61629) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 907.097505] env[61629]: DEBUG nova.compute.manager [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 907.099424] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201156c6-e7a5-4d6d-b5b3-91fd09e7d1f4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.106428] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354289, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.144371] env[61629]: DEBUG nova.compute.manager [req-37199fe0-f570-42ce-8038-319b02fbdd5d req-7beb4213-9d60-40ed-a106-e69eea4d9109 service nova] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Received event network-changed-5bb08edd-3639-401f-9e54-26abd98b246e {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 907.144371] env[61629]: DEBUG nova.compute.manager [req-37199fe0-f570-42ce-8038-319b02fbdd5d req-7beb4213-9d60-40ed-a106-e69eea4d9109 service nova] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Refreshing instance network info cache due to event network-changed-5bb08edd-3639-401f-9e54-26abd98b246e. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 907.144740] env[61629]: DEBUG oslo_concurrency.lockutils [req-37199fe0-f570-42ce-8038-319b02fbdd5d req-7beb4213-9d60-40ed-a106-e69eea4d9109 service nova] Acquiring lock "refresh_cache-274e3437-eacd-4299-9c27-97bbb0ebf1c1" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.144893] env[61629]: DEBUG oslo_concurrency.lockutils [req-37199fe0-f570-42ce-8038-319b02fbdd5d req-7beb4213-9d60-40ed-a106-e69eea4d9109 service nova] Acquired lock "refresh_cache-274e3437-eacd-4299-9c27-97bbb0ebf1c1" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.145071] env[61629]: DEBUG nova.network.neutron [req-37199fe0-f570-42ce-8038-319b02fbdd5d req-7beb4213-9d60-40ed-a106-e69eea4d9109 service nova] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Refreshing network info cache for port 5bb08edd-3639-401f-9e54-26abd98b246e {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 907.151804] env[61629]: DEBUG oslo_vmware.api [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Task: {'id': task-1354288, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198953} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.152071] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 907.152263] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 907.152434] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 907.152608] env[61629]: INFO nova.compute.manager [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Took 1.78 seconds to destroy the instance on the hypervisor. [ 907.152968] env[61629]: DEBUG oslo.service.loopingcall [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 907.155490] env[61629]: DEBUG nova.compute.manager [-] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 907.155543] env[61629]: DEBUG nova.network.neutron [-] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 907.249173] env[61629]: DEBUG nova.network.neutron [req-2eb94706-12ef-4001-a12e-b8be82cb848c req-9fd26a71-b34c-4613-abb1-18e9c49470bc service nova] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.466234] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61629) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 907.466234] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.869s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.466234] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.562s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.466234] env[61629]: INFO nova.compute.claims [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 907.508107] env[61629]: DEBUG nova.network.neutron [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Successfully updated port: 6df3f8f5-6430-44cb-ac3e-34209467a856 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 907.584149] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354286, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.563702} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.584459] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 7c3e9d0f-88a8-41fe-bf61-e3db34d36928/7c3e9d0f-88a8-41fe-bf61-e3db34d36928.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 907.584682] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 907.584937] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-966280e3-debb-4cd3-9e3d-b39b0223bdc1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.593306] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 907.593306] env[61629]: value = "task-1354290" [ 907.593306] env[61629]: _type = "Task" [ 907.593306] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.599430] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354289, 'name': PowerOnVM_Task} progress is 75%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.605808] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354290, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.618425] env[61629]: INFO nova.compute.manager [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Shelve offloading [ 907.620317] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 907.620612] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-00501ceb-7695-46c1-af1f-08ee01c08eba {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.628406] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 907.628406] env[61629]: value = "task-1354291" [ 907.628406] env[61629]: _type = "Task" [ 907.628406] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.639421] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354291, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.753689] env[61629]: DEBUG oslo_concurrency.lockutils [req-2eb94706-12ef-4001-a12e-b8be82cb848c req-9fd26a71-b34c-4613-abb1-18e9c49470bc service nova] Releasing lock "refresh_cache-3085a70f-360c-43a3-80d7-e7b87fb3e146" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.930840] env[61629]: DEBUG nova.network.neutron [-] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.948603] env[61629]: DEBUG nova.network.neutron [req-37199fe0-f570-42ce-8038-319b02fbdd5d req-7beb4213-9d60-40ed-a106-e69eea4d9109 service nova] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Updated VIF entry in instance network info cache for port 5bb08edd-3639-401f-9e54-26abd98b246e. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 907.949184] env[61629]: DEBUG nova.network.neutron [req-37199fe0-f570-42ce-8038-319b02fbdd5d req-7beb4213-9d60-40ed-a106-e69eea4d9109 service nova] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Updating instance_info_cache with network_info: [{"id": "5bb08edd-3639-401f-9e54-26abd98b246e", "address": "fa:16:3e:19:1e:55", "network": {"id": "a022be65-b398-460a-a741-9190e3d7e38d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1776770394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.204", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a35cec60cf464a1c9f8215dbc6403a84", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bb08edd-36", "ovs_interfaceid": "5bb08edd-3639-401f-9e54-26abd98b246e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.010770] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquiring lock "refresh_cache-3085a70f-360c-43a3-80d7-e7b87fb3e146" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 908.010936] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquired lock "refresh_cache-3085a70f-360c-43a3-80d7-e7b87fb3e146" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.011116] env[61629]: DEBUG nova.network.neutron [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 908.104799] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354290, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070474} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.106366] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 908.106730] env[61629]: DEBUG oslo_vmware.api [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354289, 'name': PowerOnVM_Task, 'duration_secs': 0.994471} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.107436] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58f0936-fe0c-4e2c-81c2-86b92306d8a6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.109875] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 908.110104] env[61629]: INFO nova.compute.manager [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Took 9.43 seconds to spawn the instance on the hypervisor. 
[ 908.110292] env[61629]: DEBUG nova.compute.manager [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 908.111012] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d56fba-0004-4a2f-b639-579dfce85d2d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.139329] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 7c3e9d0f-88a8-41fe-bf61-e3db34d36928/7c3e9d0f-88a8-41fe-bf61-e3db34d36928.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 908.139329] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78feac9f-524a-4ca8-ae74-6fb46dff86be {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.165493] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] VM already powered off {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 908.165687] env[61629]: DEBUG nova.compute.manager [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 908.165977] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 908.165977] env[61629]: value = "task-1354293" [ 908.165977] env[61629]: _type = "Task" [ 908.165977] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.166745] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea25ef11-180a-4ee6-9700-81d5e14626a4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.175995] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquiring lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 908.176191] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquired lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.176364] env[61629]: DEBUG nova.network.neutron [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 908.180295] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354293, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.347048] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c274bdf-8ba6-4974-bb70-95f31b7f4664 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Volume attach. 
Driver type: vmdk {{(pid=61629) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 908.347048] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c274bdf-8ba6-4974-bb70-95f31b7f4664 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-288532', 'volume_id': '5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66', 'name': 'volume-5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2b01eeae-64be-44b3-b4cf-c2a8490043e3', 'attached_at': '', 'detached_at': '', 'volume_id': '5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66', 'serial': '5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66'} {{(pid=61629) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 908.347048] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6059b9b6-aa76-4be7-b01f-1895d0fa587c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.365327] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-678db61c-a17e-45fe-99f2-e635cdfaf016 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.390325] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c274bdf-8ba6-4974-bb70-95f31b7f4664 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] volume-5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66/volume-5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66.vmdk or device None with type thin {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 908.390653] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ce0bca6-8b01-4787-af14-d7ef60b8b969 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.409760] env[61629]: DEBUG oslo_vmware.api [None req-4c274bdf-8ba6-4974-bb70-95f31b7f4664 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Waiting for the task: (returnval){ [ 908.409760] env[61629]: value = "task-1354294" [ 908.409760] env[61629]: _type = "Task" [ 908.409760] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.418725] env[61629]: DEBUG oslo_vmware.api [None req-4c274bdf-8ba6-4974-bb70-95f31b7f4664 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354294, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.433336] env[61629]: INFO nova.compute.manager [-] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Took 1.28 seconds to deallocate network for instance. 
[ 908.451935] env[61629]: DEBUG oslo_concurrency.lockutils [req-37199fe0-f570-42ce-8038-319b02fbdd5d req-7beb4213-9d60-40ed-a106-e69eea4d9109 service nova] Releasing lock "refresh_cache-274e3437-eacd-4299-9c27-97bbb0ebf1c1" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 908.528679] env[61629]: DEBUG nova.compute.manager [req-b6acd710-8058-4e8e-8851-3af10d4eda50 req-6de00d8e-581a-411d-82a6-e3c77ea10836 service nova] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Received event network-vif-deleted-e635a96a-7254-4754-9409-d9fc4a443cb5 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 908.551905] env[61629]: DEBUG nova.network.neutron [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 908.653275] env[61629]: INFO nova.compute.manager [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Took 31.26 seconds to build instance. [ 908.686366] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354293, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.779275] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4879db82-242c-4c5f-aad3-b650710eddb9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.788597] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bebd000-b037-4e6a-b2d5-0c54eeed49f9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.828201] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532ef358-9f48-4c80-9c95-924009c53bf2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.836507] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56775254-2999-4fdf-8d12-20dc0309074b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.851339] env[61629]: DEBUG nova.compute.provider_tree [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.920996] env[61629]: DEBUG oslo_vmware.api [None req-4c274bdf-8ba6-4974-bb70-95f31b7f4664 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354294, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.940884] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.942503] env[61629]: DEBUG nova.network.neutron [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Updating instance_info_cache with network_info: [{"id": "f88f7616-a027-435a-b8a9-2a3cfffadd38", "address": "fa:16:3e:b6:ce:85", "network": {"id": "ca07fdbe-0d38-4985-a1e0-496fa18a8f8d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1717731241", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.16", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ef41f406d18447fbee4e7b7ae52a2d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf88f7616-a0", "ovs_interfaceid": "f88f7616-a027-435a-b8a9-2a3cfffadd38", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6df3f8f5-6430-44cb-ac3e-34209467a856", "address": "fa:16:3e:99:0a:dc", "network": {"id": "ab342a79-04a2-4999-a794-fb400ae31da7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1783312763", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.200", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "4ef41f406d18447fbee4e7b7ae52a2d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6df3f8f5-64", "ovs_interfaceid": "6df3f8f5-6430-44cb-ac3e-34209467a856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.006037] env[61629]: DEBUG nova.network.neutron [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Updating instance_info_cache with network_info: [{"id": 
"b8a895f7-ad9d-4d49-8460-de82459d88f7", "address": "fa:16:3e:7d:7e:9a", "network": {"id": "7ab21805-1836-4ac0-94d2-d715f9f3352e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1256584900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc5fe81fb0eb4820825cc8e97b8fe4f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8a895f7-ad", "ovs_interfaceid": "b8a895f7-ad9d-4d49-8460-de82459d88f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.155390] env[61629]: DEBUG oslo_concurrency.lockutils [None req-badac49f-42b3-43ac-a844-ec05fc3fb67f tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "7cf87381-235e-449b-8269-61c2d4033028" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.768s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.180463] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354293, 'name': ReconfigVM_Task, 'duration_secs': 0.597188} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.180814] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 7c3e9d0f-88a8-41fe-bf61-e3db34d36928/7c3e9d0f-88a8-41fe-bf61-e3db34d36928.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 909.181487] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-128eaade-032b-4b38-b565-bc681973cb67 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.188558] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 909.188558] env[61629]: value = "task-1354295" [ 909.188558] env[61629]: _type = "Task" [ 909.188558] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.197383] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354295, 'name': Rename_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.259413] env[61629]: DEBUG nova.compute.manager [req-161a591c-7b6f-4b12-ba7e-9081347744e1 req-9dca8081-5fb1-4a60-a8ad-dcbb82603d88 service nova] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Received event network-vif-plugged-6df3f8f5-6430-44cb-ac3e-34209467a856 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 909.259651] env[61629]: DEBUG oslo_concurrency.lockutils [req-161a591c-7b6f-4b12-ba7e-9081347744e1 req-9dca8081-5fb1-4a60-a8ad-dcbb82603d88 service nova] Acquiring lock "3085a70f-360c-43a3-80d7-e7b87fb3e146-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.259856] env[61629]: DEBUG oslo_concurrency.lockutils [req-161a591c-7b6f-4b12-ba7e-9081347744e1 req-9dca8081-5fb1-4a60-a8ad-dcbb82603d88 service nova] Lock "3085a70f-360c-43a3-80d7-e7b87fb3e146-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.260046] env[61629]: DEBUG oslo_concurrency.lockutils [req-161a591c-7b6f-4b12-ba7e-9081347744e1 req-9dca8081-5fb1-4a60-a8ad-dcbb82603d88 service nova] Lock "3085a70f-360c-43a3-80d7-e7b87fb3e146-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.260224] env[61629]: DEBUG nova.compute.manager [req-161a591c-7b6f-4b12-ba7e-9081347744e1 req-9dca8081-5fb1-4a60-a8ad-dcbb82603d88 service nova] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] No waiting events found dispatching network-vif-plugged-6df3f8f5-6430-44cb-ac3e-34209467a856 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 909.260393] env[61629]: WARNING nova.compute.manager [req-161a591c-7b6f-4b12-ba7e-9081347744e1 req-9dca8081-5fb1-4a60-a8ad-dcbb82603d88 service nova] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Received unexpected event network-vif-plugged-6df3f8f5-6430-44cb-ac3e-34209467a856 for instance with vm_state building and task_state spawning. [ 909.260559] env[61629]: DEBUG nova.compute.manager [req-161a591c-7b6f-4b12-ba7e-9081347744e1 req-9dca8081-5fb1-4a60-a8ad-dcbb82603d88 service nova] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Received event network-changed-6df3f8f5-6430-44cb-ac3e-34209467a856 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 909.260716] env[61629]: DEBUG nova.compute.manager [req-161a591c-7b6f-4b12-ba7e-9081347744e1 req-9dca8081-5fb1-4a60-a8ad-dcbb82603d88 service nova] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Refreshing instance network info cache due to event network-changed-6df3f8f5-6430-44cb-ac3e-34209467a856. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 909.260888] env[61629]: DEBUG oslo_concurrency.lockutils [req-161a591c-7b6f-4b12-ba7e-9081347744e1 req-9dca8081-5fb1-4a60-a8ad-dcbb82603d88 service nova] Acquiring lock "refresh_cache-3085a70f-360c-43a3-80d7-e7b87fb3e146" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.360458] env[61629]: DEBUG nova.scheduler.client.report [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 909.421612] env[61629]: DEBUG oslo_vmware.api [None req-4c274bdf-8ba6-4974-bb70-95f31b7f4664 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354294, 'name': ReconfigVM_Task, 'duration_secs': 0.58657} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.422240] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c274bdf-8ba6-4974-bb70-95f31b7f4664 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Reconfigured VM instance instance-00000043 to attach disk [datastore1] volume-5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66/volume-5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66.vmdk or device None with type thin {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 909.427714] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ea51196-9f73-428e-8f94-36e773174ee4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.448020] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Releasing lock "refresh_cache-3085a70f-360c-43a3-80d7-e7b87fb3e146" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.448020] env[61629]: DEBUG nova.compute.manager [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Instance network_info: |[{"id": "f88f7616-a027-435a-b8a9-2a3cfffadd38", "address": "fa:16:3e:b6:ce:85", "network": {"id": "ca07fdbe-0d38-4985-a1e0-496fa18a8f8d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1717731241", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.16", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], 
"meta": {"injected": false, "tenant_id": "4ef41f406d18447fbee4e7b7ae52a2d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf88f7616-a0", "ovs_interfaceid": "f88f7616-a027-435a-b8a9-2a3cfffadd38", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6df3f8f5-6430-44cb-ac3e-34209467a856", "address": "fa:16:3e:99:0a:dc", "network": {"id": "ab342a79-04a2-4999-a794-fb400ae31da7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1783312763", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.200", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "4ef41f406d18447fbee4e7b7ae52a2d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6df3f8f5-64", "ovs_interfaceid": "6df3f8f5-6430-44cb-ac3e-34209467a856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 909.448020] env[61629]: DEBUG oslo_vmware.api [None req-4c274bdf-8ba6-4974-bb70-95f31b7f4664 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Waiting for the task: (returnval){ [ 909.448020] env[61629]: value = "task-1354296" [ 909.448020] env[61629]: _type = "Task" [ 909.448020] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.448020] env[61629]: DEBUG oslo_concurrency.lockutils [req-161a591c-7b6f-4b12-ba7e-9081347744e1 req-9dca8081-5fb1-4a60-a8ad-dcbb82603d88 service nova] Acquired lock "refresh_cache-3085a70f-360c-43a3-80d7-e7b87fb3e146" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.448020] env[61629]: DEBUG nova.network.neutron [req-161a591c-7b6f-4b12-ba7e-9081347744e1 req-9dca8081-5fb1-4a60-a8ad-dcbb82603d88 service nova] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Refreshing network info cache for port 6df3f8f5-6430-44cb-ac3e-34209467a856 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 909.450022] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:ce:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a1895250-76cc-41f7-b7f8-2e5679494607', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f88f7616-a027-435a-b8a9-2a3cfffadd38', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:0a:dc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f9ffee1-f413-4f28-8bc4-3fb2cf299789', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6df3f8f5-6430-44cb-ac3e-34209467a856', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 909.460787] env[61629]: DEBUG oslo.service.loopingcall [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 909.464555] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 909.469304] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f82695fe-2fce-4f81-bf79-662f5eb51b60 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.496490] env[61629]: DEBUG oslo_vmware.api [None req-4c274bdf-8ba6-4974-bb70-95f31b7f4664 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354296, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.497866] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 909.497866] env[61629]: value = "task-1354297" [ 909.497866] env[61629]: _type = "Task" [ 909.497866] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.507149] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354297, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.508799] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Releasing lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.701755] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354295, 'name': Rename_Task, 'duration_secs': 0.360192} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.702191] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 909.702569] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3dced6f-fae6-4872-b713-e86d7a548bbd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.710093] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 909.710093] env[61629]: value = "task-1354298" [ 909.710093] env[61629]: _type = "Task" [ 909.710093] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.719468] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354298, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.796073] env[61629]: DEBUG nova.network.neutron [req-161a591c-7b6f-4b12-ba7e-9081347744e1 req-9dca8081-5fb1-4a60-a8ad-dcbb82603d88 service nova] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Updated VIF entry in instance network info cache for port 6df3f8f5-6430-44cb-ac3e-34209467a856. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 909.796642] env[61629]: DEBUG nova.network.neutron [req-161a591c-7b6f-4b12-ba7e-9081347744e1 req-9dca8081-5fb1-4a60-a8ad-dcbb82603d88 service nova] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Updating instance_info_cache with network_info: [{"id": "f88f7616-a027-435a-b8a9-2a3cfffadd38", "address": "fa:16:3e:b6:ce:85", "network": {"id": "ca07fdbe-0d38-4985-a1e0-496fa18a8f8d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1717731241", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.16", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ef41f406d18447fbee4e7b7ae52a2d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf88f7616-a0", "ovs_interfaceid": "f88f7616-a027-435a-b8a9-2a3cfffadd38", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6df3f8f5-6430-44cb-ac3e-34209467a856", "address": "fa:16:3e:99:0a:dc", "network": {"id": "ab342a79-04a2-4999-a794-fb400ae31da7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1783312763", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.200", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "4ef41f406d18447fbee4e7b7ae52a2d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6df3f8f5-64", "ovs_interfaceid": "6df3f8f5-6430-44cb-ac3e-34209467a856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.868248] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.403s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.868959] env[61629]: DEBUG nova.compute.manager [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 909.872868] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.666s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.873206] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.876625] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.810s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.877297] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.879306] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.378s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.882154] env[61629]: INFO nova.compute.claims [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 909.925634] env[61629]: INFO nova.scheduler.client.report [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Deleted allocations for instance da1eb7f9-7562-40c8-955b-c11f831b7bc8 [ 909.932861] env[61629]: INFO nova.scheduler.client.report [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Deleted allocations for instance 55f2d2fc-9404-422f-ba08-72e6e11a089f [ 909.972535] env[61629]: DEBUG oslo_vmware.api [None req-4c274bdf-8ba6-4974-bb70-95f31b7f4664 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354296, 'name': ReconfigVM_Task, 'duration_secs': 0.190457} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.972998] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c274bdf-8ba6-4974-bb70-95f31b7f4664 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-288532', 'volume_id': '5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66', 'name': 'volume-5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2b01eeae-64be-44b3-b4cf-c2a8490043e3', 'attached_at': '', 'detached_at': '', 'volume_id': '5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66', 'serial': '5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66'} {{(pid=61629) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 910.010109] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354297, 'name': CreateVM_Task, 'duration_secs': 0.433382} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.011292] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 910.011771] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 910.012520] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f5f9e93-c728-4afd-9668-227351c25f6e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.018486] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.018486] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.018486] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 910.018486] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-245bc048-6346-46c8-91ca-9a48789eabae {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.021039] env[61629]: DEBUG 
nova.virt.vmwareapi.volumeops [None req-e651b35a-b394-4409-8d9a-a26fc4c7590a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Volume attach. Driver type: vmdk {{(pid=61629) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 910.021280] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-e651b35a-b394-4409-8d9a-a26fc4c7590a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-288534', 'volume_id': '038d52b8-1702-41d3-b2ef-775b800e1724', 'name': 'volume-038d52b8-1702-41d3-b2ef-775b800e1724', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a42d5132-22e5-4551-80d2-fb7a55a7fa9e', 'attached_at': '', 'detached_at': '', 'volume_id': '038d52b8-1702-41d3-b2ef-775b800e1724', 'serial': '038d52b8-1702-41d3-b2ef-775b800e1724'} {{(pid=61629) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 910.022079] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-210690e7-cb0e-41a9-b4ad-0f9ed43558db {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.027248] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 910.028462] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-02503c76-d3e2-4e33-8ea2-baf5c36a363b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.030886] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for the task: (returnval){ [ 910.030886] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]520812f7-8bd2-c84a-d4d4-e904d9973960" [ 910.030886] env[61629]: _type = "Task" [ 910.030886] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.046059] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a76390f-6322-47f6-9c85-9f77254def3c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.056756] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]520812f7-8bd2-c84a-d4d4-e904d9973960, 'name': SearchDatastore_Task, 'duration_secs': 0.013039} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.070216] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.070532] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 910.070776] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.070931] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.071132] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 910.079235] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-e651b35a-b394-4409-8d9a-a26fc4c7590a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] volume-038d52b8-1702-41d3-b2ef-775b800e1724/volume-038d52b8-1702-41d3-b2ef-775b800e1724.vmdk or device None with type thin {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 910.079523] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c5df355-fac5-4645-9f73-4827b3d5a0a2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.081529] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8589f0d-2032-46f8-8168-44ed5332c02f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.101544] env[61629]: DEBUG oslo_vmware.api [None req-e651b35a-b394-4409-8d9a-a26fc4c7590a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 910.101544] env[61629]: value = "task-1354300" [ 910.101544] env[61629]: _type = "Task" [ 910.101544] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.102784] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 910.102969] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 910.106763] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fcc44460-1d87-4c9a-8b04-3aa1af02f67c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.116342] env[61629]: DEBUG oslo_vmware.api [None req-e651b35a-b394-4409-8d9a-a26fc4c7590a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354300, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.117586] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for the task: (returnval){ [ 910.117586] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52007b07-9aaa-0e67-cf52-9cde3e3d342e" [ 910.117586] env[61629]: _type = "Task" [ 910.117586] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.125911] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52007b07-9aaa-0e67-cf52-9cde3e3d342e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.150036] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 910.150036] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 910.150036] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Deleting the datastore file [datastore2] fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 910.150359] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d0d2ebfe-05a3-426b-838b-06fab7bd56b7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.159614] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 910.159614] env[61629]: value = "task-1354301" [ 910.159614] env[61629]: _type = "Task" [ 910.159614] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.168741] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354301, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.222069] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354298, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.301407] env[61629]: DEBUG oslo_concurrency.lockutils [req-161a591c-7b6f-4b12-ba7e-9081347744e1 req-9dca8081-5fb1-4a60-a8ad-dcbb82603d88 service nova] Releasing lock "refresh_cache-3085a70f-360c-43a3-80d7-e7b87fb3e146" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.386133] env[61629]: DEBUG nova.compute.utils [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 910.394595] env[61629]: DEBUG nova.compute.manager [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 910.394595] env[61629]: DEBUG nova.network.neutron [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 910.439272] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3f7f34ff-55c7-4544-bb5d-736b5c26c1af tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "da1eb7f9-7562-40c8-955b-c11f831b7bc8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.549s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.445112] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a0eebfea-2b00-46a9-838b-16226cafc3b8 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "55f2d2fc-9404-422f-ba08-72e6e11a089f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.509s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.469902] env[61629]: DEBUG nova.policy [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '08c4f87f2daa4317bb48ef682ca24836', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7a2371a7e8504b14bd3788e9d9bd92a2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 910.618725] env[61629]: DEBUG oslo_vmware.api [None req-e651b35a-b394-4409-8d9a-a26fc4c7590a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354300, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.636942] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52007b07-9aaa-0e67-cf52-9cde3e3d342e, 'name': SearchDatastore_Task, 'duration_secs': 0.031081} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.638623] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8b24d9b-269b-47cf-ad36-7afb5749a2ac {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.646241] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for the task: (returnval){ [ 910.646241] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5270d3f5-488c-c0e7-90c0-7ff6824befbb" [ 910.646241] env[61629]: _type = "Task" [ 910.646241] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.658951] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5270d3f5-488c-c0e7-90c0-7ff6824befbb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.669981] env[61629]: DEBUG oslo_vmware.api [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354301, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.207116} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.670286] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 910.670482] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 910.670666] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 910.696654] env[61629]: INFO nova.scheduler.client.report [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Deleted allocations for instance fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4 [ 910.721948] env[61629]: DEBUG oslo_vmware.api [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354298, 'name': PowerOnVM_Task, 'duration_secs': 0.624641} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.722348] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 910.722596] env[61629]: INFO nova.compute.manager [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Took 9.55 seconds to spawn the instance on the hypervisor. 
[ 910.722838] env[61629]: DEBUG nova.compute.manager [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 910.723658] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-132e7c7a-e6c2-4137-838f-75f3378aa6d5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.805503] env[61629]: DEBUG nova.compute.manager [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Stashing vm_state: active {{(pid=61629) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 910.835718] env[61629]: DEBUG nova.network.neutron [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Successfully created port: 787f3ff1-d4f3-429f-8ee5-a5785d993cfc {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 910.877941] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "d37958f8-7607-418b-9cfd-c3a5df721e94" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.878230] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "d37958f8-7607-418b-9cfd-c3a5df721e94" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.878462] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "d37958f8-7607-418b-9cfd-c3a5df721e94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.878656] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "d37958f8-7607-418b-9cfd-c3a5df721e94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.878828] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "d37958f8-7607-418b-9cfd-c3a5df721e94-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.884613] env[61629]: INFO nova.compute.manager [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Terminating instance [ 910.886824] env[61629]: DEBUG nova.compute.manager [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 910.887078] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 910.888414] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228c5923-b885-45b9-a1b4-66940a4837aa {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.900128] env[61629]: DEBUG nova.compute.manager [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 910.902986] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 910.903823] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-41c3ea4f-307e-4d9b-bd47-a79b7fd2a697 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.916448] env[61629]: DEBUG oslo_vmware.api [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 910.916448] env[61629]: value = "task-1354302" [ 910.916448] env[61629]: _type = "Task" [ 910.916448] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.928222] env[61629]: DEBUG oslo_vmware.api [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354302, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.013679] env[61629]: DEBUG nova.objects.instance [None req-4c274bdf-8ba6-4974-bb70-95f31b7f4664 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lazy-loading 'flavor' on Instance uuid 2b01eeae-64be-44b3-b4cf-c2a8490043e3 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 911.113345] env[61629]: DEBUG oslo_vmware.api [None req-e651b35a-b394-4409-8d9a-a26fc4c7590a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354300, 'name': ReconfigVM_Task, 'duration_secs': 0.575808} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.116304] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-e651b35a-b394-4409-8d9a-a26fc4c7590a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Reconfigured VM instance instance-00000050 to attach disk [datastore2] volume-038d52b8-1702-41d3-b2ef-775b800e1724/volume-038d52b8-1702-41d3-b2ef-775b800e1724.vmdk or device None with type thin {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 911.123252] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2cfefec-9938-4bfe-a27e-875862c0a4c3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.149031] env[61629]: DEBUG oslo_vmware.api [None req-e651b35a-b394-4409-8d9a-a26fc4c7590a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 911.149031] env[61629]: value = "task-1354303" [ 911.149031] env[61629]: _type = "Task" [ 911.149031] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.166410] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5270d3f5-488c-c0e7-90c0-7ff6824befbb, 'name': SearchDatastore_Task, 'duration_secs': 0.012415} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.171184] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.171732] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 3085a70f-360c-43a3-80d7-e7b87fb3e146/3085a70f-360c-43a3-80d7-e7b87fb3e146.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 911.172497] env[61629]: DEBUG oslo_vmware.api [None req-e651b35a-b394-4409-8d9a-a26fc4c7590a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354303, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.172828] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ff2f785a-bdf6-4e43-a62d-3a67d0147067 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.185022] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for the task: (returnval){ [ 911.185022] env[61629]: value = "task-1354304" [ 911.185022] env[61629]: _type = "Task" [ 911.185022] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.192397] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354304, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.202018] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.232772] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a647e6c8-43af-46f7-afb5-dd9effa09275 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.249706] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec4a83a-d70e-4c2f-86d6-822df311e41a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.253471] env[61629]: INFO nova.compute.manager [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Took 33.35 seconds to build instance. [ 911.290854] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e036aaf5-8fc5-4ec1-bdd2-a4c3e509e47b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.301982] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2f20ad-1b4e-4e65-a623-0c594ecfe1a8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.321802] env[61629]: DEBUG nova.compute.provider_tree [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 911.334242] env[61629]: DEBUG nova.compute.manager [req-ce2717ce-5989-4ced-a1c7-9f7ba62a6372 req-b1ab36d0-38f3-44a9-b632-2e4fcebdf1ba service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Received event network-vif-unplugged-b8a895f7-ad9d-4d49-8460-de82459d88f7 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 911.334457] env[61629]: DEBUG oslo_concurrency.lockutils [req-ce2717ce-5989-4ced-a1c7-9f7ba62a6372 req-b1ab36d0-38f3-44a9-b632-2e4fcebdf1ba service nova] Acquiring lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.334793] env[61629]: DEBUG oslo_concurrency.lockutils [req-ce2717ce-5989-4ced-a1c7-9f7ba62a6372 req-b1ab36d0-38f3-44a9-b632-2e4fcebdf1ba service nova] Lock 
"fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.334984] env[61629]: DEBUG oslo_concurrency.lockutils [req-ce2717ce-5989-4ced-a1c7-9f7ba62a6372 req-b1ab36d0-38f3-44a9-b632-2e4fcebdf1ba service nova] Lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.335174] env[61629]: DEBUG nova.compute.manager [req-ce2717ce-5989-4ced-a1c7-9f7ba62a6372 req-b1ab36d0-38f3-44a9-b632-2e4fcebdf1ba service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] No waiting events found dispatching network-vif-unplugged-b8a895f7-ad9d-4d49-8460-de82459d88f7 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 911.335347] env[61629]: WARNING nova.compute.manager [req-ce2717ce-5989-4ced-a1c7-9f7ba62a6372 req-b1ab36d0-38f3-44a9-b632-2e4fcebdf1ba service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Received unexpected event network-vif-unplugged-b8a895f7-ad9d-4d49-8460-de82459d88f7 for instance with vm_state shelved_offloaded and task_state None. [ 911.335511] env[61629]: DEBUG nova.compute.manager [req-ce2717ce-5989-4ced-a1c7-9f7ba62a6372 req-b1ab36d0-38f3-44a9-b632-2e4fcebdf1ba service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Received event network-changed-b8a895f7-ad9d-4d49-8460-de82459d88f7 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 911.335687] env[61629]: DEBUG nova.compute.manager [req-ce2717ce-5989-4ced-a1c7-9f7ba62a6372 req-b1ab36d0-38f3-44a9-b632-2e4fcebdf1ba service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Refreshing instance network info cache due to event network-changed-b8a895f7-ad9d-4d49-8460-de82459d88f7. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 911.335942] env[61629]: DEBUG oslo_concurrency.lockutils [req-ce2717ce-5989-4ced-a1c7-9f7ba62a6372 req-b1ab36d0-38f3-44a9-b632-2e4fcebdf1ba service nova] Acquiring lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.336037] env[61629]: DEBUG oslo_concurrency.lockutils [req-ce2717ce-5989-4ced-a1c7-9f7ba62a6372 req-b1ab36d0-38f3-44a9-b632-2e4fcebdf1ba service nova] Acquired lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.336229] env[61629]: DEBUG nova.network.neutron [req-ce2717ce-5989-4ced-a1c7-9f7ba62a6372 req-b1ab36d0-38f3-44a9-b632-2e4fcebdf1ba service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Refreshing network info cache for port b8a895f7-ad9d-4d49-8460-de82459d88f7 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 911.338463] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.427767] env[61629]: DEBUG oslo_vmware.api [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354302, 'name': PowerOffVM_Task, 'duration_secs': 0.381529} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.428058] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 911.428231] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 911.428498] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-262bc64f-f199-48d4-8907-14aa6b9a5b32 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.513203] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 911.515392] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 911.515677] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Deleting the datastore file [datastore1] d37958f8-7607-418b-9cfd-c3a5df721e94 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 911.515977] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74dc51ae-1b7a-435e-b392-afc306eeb960 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.523825] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4c274bdf-8ba6-4974-bb70-95f31b7f4664 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.785s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.530173] env[61629]: DEBUG oslo_vmware.api [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 911.530173] env[61629]: value = "task-1354306" [ 911.530173] env[61629]: _type = "Task" [ 911.530173] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.544678] env[61629]: DEBUG oslo_vmware.api [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354306, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.666344] env[61629]: DEBUG oslo_vmware.api [None req-e651b35a-b394-4409-8d9a-a26fc4c7590a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354303, 'name': ReconfigVM_Task, 'duration_secs': 0.18538} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.666872] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-e651b35a-b394-4409-8d9a-a26fc4c7590a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-288534', 'volume_id': '038d52b8-1702-41d3-b2ef-775b800e1724', 'name': 'volume-038d52b8-1702-41d3-b2ef-775b800e1724', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a42d5132-22e5-4551-80d2-fb7a55a7fa9e', 'attached_at': '', 'detached_at': '', 'volume_id': '038d52b8-1702-41d3-b2ef-775b800e1724', 'serial': '038d52b8-1702-41d3-b2ef-775b800e1724'} {{(pid=61629) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 911.695807] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354304, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.756156] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0230baba-f440-4083-8f09-5deaf47cce62 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "7c3e9d0f-88a8-41fe-bf61-e3db34d36928" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.242s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.771413] env[61629]: DEBUG oslo_concurrency.lockutils [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "459c5f25-8fb1-4e43-8f7f-359a7ff697f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.771679] env[61629]: DEBUG oslo_concurrency.lockutils [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "459c5f25-8fb1-4e43-8f7f-359a7ff697f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.845763] env[61629]: ERROR nova.scheduler.client.report [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [req-1cbac432-8ef5-44d7-9514-9ba8c2e9ea56] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID d075eff1-6f77-44a8-824e-16f3e03b4063. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1cbac432-8ef5-44d7-9514-9ba8c2e9ea56"}]} [ 911.867448] env[61629]: DEBUG nova.scheduler.client.report [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Refreshing inventories for resource provider d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 911.898307] env[61629]: DEBUG nova.scheduler.client.report [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Updating ProviderTree inventory for provider d075eff1-6f77-44a8-824e-16f3e03b4063 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 911.898587] env[61629]: DEBUG nova.compute.provider_tree [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 911.914632] env[61629]: DEBUG nova.compute.manager [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 911.918174] env[61629]: DEBUG nova.scheduler.client.report [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Refreshing aggregate associations for resource provider d075eff1-6f77-44a8-824e-16f3e03b4063, aggregates: None {{(pid=61629) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 911.951498] env[61629]: DEBUG nova.scheduler.client.report [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Refreshing trait associations for resource provider d075eff1-6f77-44a8-824e-16f3e03b4063, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61629) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 911.966133] env[61629]: DEBUG nova.virt.hardware [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 911.966661] env[61629]: DEBUG nova.virt.hardware [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 911.966932] env[61629]: DEBUG nova.virt.hardware [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 911.967223] env[61629]: DEBUG nova.virt.hardware [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 911.967451] env[61629]: DEBUG nova.virt.hardware [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 911.967930] env[61629]: DEBUG nova.virt.hardware [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c 
tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 911.968039] env[61629]: DEBUG nova.virt.hardware [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 911.968242] env[61629]: DEBUG nova.virt.hardware [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 911.968509] env[61629]: DEBUG nova.virt.hardware [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 911.968806] env[61629]: DEBUG nova.virt.hardware [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 911.969124] env[61629]: DEBUG nova.virt.hardware [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 911.970186] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7dc0509-5023-47ce-9e55-c827ed6c14c6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.981920] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62297ddd-6cc8-4bee-b589-b8818fee554b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.040525] env[61629]: DEBUG oslo_vmware.api [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354306, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.194814] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354304, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.737446} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.197467] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 3085a70f-360c-43a3-80d7-e7b87fb3e146/3085a70f-360c-43a3-80d7-e7b87fb3e146.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 912.197714] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 912.198782] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4e70d1d7-785f-4878-8d46-31b18d94fa8d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.205944] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for the task: (returnval){ [ 912.205944] env[61629]: value = "task-1354307" [ 912.205944] env[61629]: _type = "Task" [ 912.205944] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.215539] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354307, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.238277] env[61629]: DEBUG nova.network.neutron [req-ce2717ce-5989-4ced-a1c7-9f7ba62a6372 req-b1ab36d0-38f3-44a9-b632-2e4fcebdf1ba service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Updated VIF entry in instance network info cache for port b8a895f7-ad9d-4d49-8460-de82459d88f7. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 912.238595] env[61629]: DEBUG nova.network.neutron [req-ce2717ce-5989-4ced-a1c7-9f7ba62a6372 req-b1ab36d0-38f3-44a9-b632-2e4fcebdf1ba service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Updating instance_info_cache with network_info: [{"id": "b8a895f7-ad9d-4d49-8460-de82459d88f7", "address": "fa:16:3e:7d:7e:9a", "network": {"id": "7ab21805-1836-4ac0-94d2-d715f9f3352e", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1256584900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc5fe81fb0eb4820825cc8e97b8fe4f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapb8a895f7-ad", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.278689] env[61629]: DEBUG nova.compute.manager [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 912.330823] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d0e4a2-f5a5-4338-b86e-fe071592de4d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.343874] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "2ce60374-7baf-4d27-afbd-dcfaf6600a78" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.344281] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "2ce60374-7baf-4d27-afbd-dcfaf6600a78" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.346315] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58aede3-7afd-4abf-8fd2-00df2e41552f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.384290] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2880fe8b-554f-4447-803b-d6a61f7f1f12 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 912.392833] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4da9877-6670-4e80-8016-cf7c60016c52 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.414791] env[61629]: DEBUG nova.compute.provider_tree [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 912.542122] env[61629]: DEBUG oslo_vmware.api [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354306, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.684862} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.542400] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 912.542592] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 912.542773] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 912.543193] env[61629]: INFO nova.compute.manager [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Took 1.66 seconds to destroy the instance on the hypervisor. [ 912.543460] env[61629]: DEBUG oslo.service.loopingcall [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 912.543667] env[61629]: DEBUG nova.compute.manager [-] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 912.543750] env[61629]: DEBUG nova.network.neutron [-] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 912.719462] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354307, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.233126} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.719579] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 912.720428] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-086cdac3-e765-411c-aba1-5779addcdd8d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.725320] env[61629]: DEBUG nova.network.neutron [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Successfully updated port: 787f3ff1-d4f3-429f-8ee5-a5785d993cfc {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 912.727922] env[61629]: DEBUG nova.objects.instance [None req-e651b35a-b394-4409-8d9a-a26fc4c7590a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lazy-loading 'flavor' on Instance uuid a42d5132-22e5-4551-80d2-fb7a55a7fa9e {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 912.747456] env[61629]: DEBUG oslo_concurrency.lockutils [req-ce2717ce-5989-4ced-a1c7-9f7ba62a6372 req-b1ab36d0-38f3-44a9-b632-2e4fcebdf1ba service nova] Releasing lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.757543] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 3085a70f-360c-43a3-80d7-e7b87fb3e146/3085a70f-360c-43a3-80d7-e7b87fb3e146.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 912.759040] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a043affa-0f09-428c-a01e-33c1c47608bb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.781583] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 
tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for the task: (returnval){ [ 912.781583] env[61629]: value = "task-1354308" [ 912.781583] env[61629]: _type = "Task" [ 912.781583] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.794209] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354308, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.814519] env[61629]: DEBUG oslo_concurrency.lockutils [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.851654] env[61629]: DEBUG nova.compute.manager [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 912.965322] env[61629]: DEBUG nova.scheduler.client.report [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Updated inventory for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with generation 99 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 912.965680] env[61629]: DEBUG nova.compute.provider_tree [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Updating resource provider d075eff1-6f77-44a8-824e-16f3e03b4063 generation from 99 to 100 during operation: update_inventory {{(pid=61629) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 912.965878] env[61629]: DEBUG nova.compute.provider_tree [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 913.100914] env[61629]: DEBUG nova.compute.manager [req-cf5b50ec-6690-488e-b7eb-06617a04593d 
req-5859a151-37ac-4461-ae02-f166028f1cf3 service nova] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Received event network-vif-deleted-b0ccf912-7d97-4281-943f-c7ccdf8eec23 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 913.101137] env[61629]: INFO nova.compute.manager [req-cf5b50ec-6690-488e-b7eb-06617a04593d req-5859a151-37ac-4461-ae02-f166028f1cf3 service nova] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Neutron deleted interface b0ccf912-7d97-4281-943f-c7ccdf8eec23; detaching it from the instance and deleting it from the info cache [ 913.101424] env[61629]: DEBUG nova.network.neutron [req-cf5b50ec-6690-488e-b7eb-06617a04593d req-5859a151-37ac-4461-ae02-f166028f1cf3 service nova] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.179070] env[61629]: DEBUG oslo_concurrency.lockutils [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "a42d5132-22e5-4551-80d2-fb7a55a7fa9e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.232788] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Acquiring lock "refresh_cache-2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 913.232842] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Acquired lock "refresh_cache-2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.232953] env[61629]: DEBUG nova.network.neutron [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 913.236921] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e651b35a-b394-4409-8d9a-a26fc4c7590a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "a42d5132-22e5-4551-80d2-fb7a55a7fa9e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.689s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.238417] env[61629]: DEBUG oslo_concurrency.lockutils [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "a42d5132-22e5-4551-80d2-fb7a55a7fa9e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.059s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.238417] env[61629]: DEBUG oslo_concurrency.lockutils 
[None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "a42d5132-22e5-4551-80d2-fb7a55a7fa9e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.238417] env[61629]: DEBUG oslo_concurrency.lockutils [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "a42d5132-22e5-4551-80d2-fb7a55a7fa9e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.238599] env[61629]: DEBUG oslo_concurrency.lockutils [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "a42d5132-22e5-4551-80d2-fb7a55a7fa9e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.240211] env[61629]: INFO nova.compute.manager [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Terminating instance [ 913.242835] env[61629]: DEBUG nova.compute.manager [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 913.243193] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 913.243600] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62fac2eb-fe5e-49f3-a235-7031d8ec973f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.251350] env[61629]: DEBUG oslo_vmware.api [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 913.251350] env[61629]: value = "task-1354309" [ 913.251350] env[61629]: _type = "Task" [ 913.251350] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.262115] env[61629]: DEBUG oslo_vmware.api [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354309, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.295623] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354308, 'name': ReconfigVM_Task, 'duration_secs': 0.510173} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.299871] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 3085a70f-360c-43a3-80d7-e7b87fb3e146/3085a70f-360c-43a3-80d7-e7b87fb3e146.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 913.299871] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8f1c416a-34c3-4825-b2ae-e15a05903d38 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.307018] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for the task: (returnval){ [ 913.307018] env[61629]: value = "task-1354310" [ 913.307018] env[61629]: _type = "Task" [ 913.307018] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.318426] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354310, 'name': Rename_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.329374] env[61629]: DEBUG nova.network.neutron [-] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.377267] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.472426] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.593s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.473093] env[61629]: DEBUG nova.compute.manager [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 913.476919] env[61629]: DEBUG oslo_concurrency.lockutils [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.709s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.477216] env[61629]: DEBUG oslo_concurrency.lockutils [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.479662] env[61629]: DEBUG oslo_concurrency.lockutils [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.415s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.481729] env[61629]: INFO nova.compute.claims [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 913.507781] env[61629]: INFO nova.scheduler.client.report [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Deleted allocations for instance 12c6b03b-8295-43de-898f-a6c35f1693b7 [ 913.525649] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b37d420f-bece-4ae9-a5ef-76e49a962e48 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquiring lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.525928] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b37d420f-bece-4ae9-a5ef-76e49a962e48 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.603925] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8b8984bc-78b5-45a1-8216-885b68724a12 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.613448] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a2dcab-1e96-479f-bca3-14a68ce87516 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.643679] env[61629]: DEBUG nova.compute.manager [req-cf5b50ec-6690-488e-b7eb-06617a04593d 
req-5859a151-37ac-4461-ae02-f166028f1cf3 service nova] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Detach interface failed, port_id=b0ccf912-7d97-4281-943f-c7ccdf8eec23, reason: Instance d37958f8-7607-418b-9cfd-c3a5df721e94 could not be found. {{(pid=61629) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 913.761344] env[61629]: DEBUG oslo_vmware.api [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354309, 'name': PowerOffVM_Task, 'duration_secs': 0.294309} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.761698] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 913.762729] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Volume detach. Driver type: vmdk {{(pid=61629) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 913.762729] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-288534', 'volume_id': '038d52b8-1702-41d3-b2ef-775b800e1724', 'name': 'volume-038d52b8-1702-41d3-b2ef-775b800e1724', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a42d5132-22e5-4551-80d2-fb7a55a7fa9e', 'attached_at': '', 'detached_at': '', 'volume_id': '038d52b8-1702-41d3-b2ef-775b800e1724', 'serial': '038d52b8-1702-41d3-b2ef-775b800e1724'} {{(pid=61629) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 913.762931] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70afd20-3589-4b09-861c-dc8093386508 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.793029] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ebc95f2-e38b-4413-882c-4e446a39b3a0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.798128] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ece863-3c97-4fd2-bbcb-745bf2f7eb9c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.824419] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46efea5-71b7-4244-9270-9bae379094f3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.831962] env[61629]: INFO nova.compute.manager [-] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Took 1.29 seconds to deallocate 
network for instance. [ 913.832383] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354310, 'name': Rename_Task, 'duration_secs': 0.147039} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.844177] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 913.844797] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] The volume has not been displaced from its original location: [datastore2] volume-038d52b8-1702-41d3-b2ef-775b800e1724/volume-038d52b8-1702-41d3-b2ef-775b800e1724.vmdk. No consolidation needed. {{(pid=61629) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 913.850179] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Reconfiguring VM instance instance-00000050 to detach disk 2001 {{(pid=61629) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 913.851061] env[61629]: DEBUG nova.network.neutron [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 913.856025] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-73b7d2c9-ffdd-46d5-806c-349f33db1245 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.857645] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b313db07-6562-4394-a8b8-9375cf2070d3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.877628] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for the task: (returnval){ [ 913.877628] env[61629]: value = "task-1354311" [ 913.877628] env[61629]: _type = "Task" [ 913.877628] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.879093] env[61629]: DEBUG oslo_vmware.api [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 913.879093] env[61629]: value = "task-1354312" [ 913.879093] env[61629]: _type = "Task" [ 913.879093] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.890899] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354311, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.931393] env[61629]: DEBUG nova.compute.manager [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Received event network-changed-91aa1640-3097-4a26-9090-4081740f917d {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 913.931595] env[61629]: DEBUG nova.compute.manager [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Refreshing instance network info cache due to event network-changed-91aa1640-3097-4a26-9090-4081740f917d. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 913.931819] env[61629]: DEBUG oslo_concurrency.lockutils [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] Acquiring lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 913.931990] env[61629]: DEBUG oslo_concurrency.lockutils [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] Acquired lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.932381] env[61629]: DEBUG nova.network.neutron [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Refreshing network info cache for port 91aa1640-3097-4a26-9090-4081740f917d {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 913.990511] env[61629]: DEBUG nova.compute.utils [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 913.998843] env[61629]: DEBUG nova.compute.manager [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 913.999175] env[61629]: DEBUG nova.network.neutron [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 914.017207] env[61629]: DEBUG oslo_concurrency.lockutils [None req-555ce2a7-25d5-4c2c-b523-18788665ebbe tempest-InstanceActionsNegativeTestJSON-443323221 tempest-InstanceActionsNegativeTestJSON-443323221-project-member] Lock "12c6b03b-8295-43de-898f-a6c35f1693b7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.409s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.031392] env[61629]: DEBUG nova.compute.utils [None req-b37d420f-bece-4ae9-a5ef-76e49a962e48 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 914.088638] env[61629]: DEBUG nova.policy [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94392b52017b4ff9a4d463027eb6e272', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5317f3f744334c279f4cc5c3281aa3a1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 914.278343] env[61629]: DEBUG nova.network.neutron [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Updating instance_info_cache with network_info: [{"id": "787f3ff1-d4f3-429f-8ee5-a5785d993cfc", "address": "fa:16:3e:ac:23:ad", "network": {"id": "2b98da83-9005-449d-ae25-3bad9cd73b35", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-987359251-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a2371a7e8504b14bd3788e9d9bd92a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap787f3ff1-d4", "ovs_interfaceid": "787f3ff1-d4f3-429f-8ee5-a5785d993cfc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 914.373508] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.399620] env[61629]: DEBUG oslo_vmware.api [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354311, 'name': PowerOnVM_Task, 'duration_secs': 0.513457} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.402701] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 914.402938] env[61629]: INFO nova.compute.manager [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Took 10.77 seconds to spawn the instance on the hypervisor. [ 914.403138] env[61629]: DEBUG nova.compute.manager [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 914.403482] env[61629]: DEBUG oslo_vmware.api [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354312, 'name': ReconfigVM_Task, 'duration_secs': 0.317752} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.404234] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e388c8ae-8324-4f6d-b295-3cd158d9a6b9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.406865] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Reconfigured VM instance instance-00000050 to detach disk 2001 {{(pid=61629) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 914.411573] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28888e17-3bd6-4e09-94e3-7e98039c6426 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.433582] env[61629]: DEBUG oslo_vmware.api [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 914.433582] env[61629]: value = "task-1354313" [ 914.433582] env[61629]: _type = "Task" [ 914.433582] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.445588] env[61629]: DEBUG oslo_vmware.api [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354313, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.496177] env[61629]: DEBUG nova.compute.manager [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 914.534111] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b37d420f-bece-4ae9-a5ef-76e49a962e48 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.638196] env[61629]: DEBUG nova.network.neutron [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Successfully created port: dd6bf404-0e6a-4868-a42a-abc64b875fa8 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 914.781176] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Releasing lock "refresh_cache-2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.781560] env[61629]: DEBUG nova.compute.manager [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Instance network_info: |[{"id": "787f3ff1-d4f3-429f-8ee5-a5785d993cfc", "address": "fa:16:3e:ac:23:ad", "network": {"id": "2b98da83-9005-449d-ae25-3bad9cd73b35", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-987359251-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a2371a7e8504b14bd3788e9d9bd92a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap787f3ff1-d4", "ovs_interfaceid": "787f3ff1-d4f3-429f-8ee5-a5785d993cfc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": 
true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 914.782038] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:23:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbdab640-5fea-4254-8bd3-f855b7eaca0d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '787f3ff1-d4f3-429f-8ee5-a5785d993cfc', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 914.790819] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Creating folder: Project (7a2371a7e8504b14bd3788e9d9bd92a2). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 914.791579] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e6a9372c-9bf8-4049-885c-0596428fd978 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.805389] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Created folder: Project (7a2371a7e8504b14bd3788e9d9bd92a2) in parent group-v288443. [ 914.805600] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Creating folder: Instances. Parent ref: group-v288536. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 914.805853] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-23f44d91-03ed-4113-b45f-a0d7fcec20f3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.813723] env[61629]: DEBUG nova.network.neutron [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Updated VIF entry in instance network info cache for port 91aa1640-3097-4a26-9090-4081740f917d. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 914.814190] env[61629]: DEBUG nova.network.neutron [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Updating instance_info_cache with network_info: [{"id": "91aa1640-3097-4a26-9090-4081740f917d", "address": "fa:16:3e:d4:a8:15", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91aa1640-30", "ovs_interfaceid": "91aa1640-3097-4a26-9090-4081740f917d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.819665] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Created folder: Instances in parent group-v288536. [ 914.819665] env[61629]: DEBUG oslo.service.loopingcall [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 914.819665] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 914.819665] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dbb473e0-46e5-4718-b8a7-b8a563b2b8f3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.834853] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab7c49ed-28e1-4213-b9e9-f418110bcf45 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.843686] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bbe0227-f354-4ba8-b0c2-2aafbc9f1657 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.846912] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 914.846912] env[61629]: value = "task-1354316" [ 914.846912] env[61629]: _type = "Task" [ 914.846912] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.884250] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d52ed69-334d-41fd-94be-9f506d022349 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.890903] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354316, 'name': CreateVM_Task} progress is 15%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.896184] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b685185-f8e0-44c9-87f6-a3bf9d1d68f2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.913087] env[61629]: DEBUG nova.compute.provider_tree [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 914.945096] env[61629]: INFO nova.compute.manager [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Took 33.32 seconds to build instance. [ 914.950991] env[61629]: DEBUG oslo_vmware.api [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354313, 'name': ReconfigVM_Task, 'duration_secs': 0.446962} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.951567] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-288534', 'volume_id': '038d52b8-1702-41d3-b2ef-775b800e1724', 'name': 'volume-038d52b8-1702-41d3-b2ef-775b800e1724', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a42d5132-22e5-4551-80d2-fb7a55a7fa9e', 'attached_at': '', 'detached_at': '', 'volume_id': '038d52b8-1702-41d3-b2ef-775b800e1724', 'serial': '038d52b8-1702-41d3-b2ef-775b800e1724'} {{(pid=61629) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 914.951849] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 914.952709] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d6b607-220d-485b-8089-89e1d713fad8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.960008] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 914.960293] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b331a305-9cc8-4cdb-8d1a-49a32ea7de11 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.032918] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 915.032918] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 915.033453] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Deleting the datastore file [datastore1] a42d5132-22e5-4551-80d2-fb7a55a7fa9e {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 915.033559] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1c6d9001-20ec-4665-b7b9-32195a33fdb1 {{(pid=61629) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.042184] env[61629]: DEBUG oslo_vmware.api [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 915.042184] env[61629]: value = "task-1354318" [ 915.042184] env[61629]: _type = "Task" [ 915.042184] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.053209] env[61629]: DEBUG oslo_vmware.api [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354318, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.131811] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquiring lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.213534] env[61629]: DEBUG nova.compute.manager [req-1fcc7eaa-5bc3-406c-b1dc-4441744d31a4 req-eb2dd58b-dfc9-4f13-8c04-67b8f7b5282c service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Received event network-changed-a193ab2f-5a9d-4411-94f9-cc5834b60795 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 915.213687] env[61629]: DEBUG nova.compute.manager [req-1fcc7eaa-5bc3-406c-b1dc-4441744d31a4 req-eb2dd58b-dfc9-4f13-8c04-67b8f7b5282c service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Refreshing instance network info cache due to event network-changed-a193ab2f-5a9d-4411-94f9-cc5834b60795. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 915.213915] env[61629]: DEBUG oslo_concurrency.lockutils [req-1fcc7eaa-5bc3-406c-b1dc-4441744d31a4 req-eb2dd58b-dfc9-4f13-8c04-67b8f7b5282c service nova] Acquiring lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.214155] env[61629]: DEBUG oslo_concurrency.lockutils [req-1fcc7eaa-5bc3-406c-b1dc-4441744d31a4 req-eb2dd58b-dfc9-4f13-8c04-67b8f7b5282c service nova] Acquired lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.214277] env[61629]: DEBUG nova.network.neutron [req-1fcc7eaa-5bc3-406c-b1dc-4441744d31a4 req-eb2dd58b-dfc9-4f13-8c04-67b8f7b5282c service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Refreshing network info cache for port a193ab2f-5a9d-4411-94f9-cc5834b60795 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 915.316911] env[61629]: DEBUG oslo_concurrency.lockutils [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] Releasing lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.317274] env[61629]: DEBUG nova.compute.manager [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Received event network-changed-a193ab2f-5a9d-4411-94f9-cc5834b60795 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 915.317468] env[61629]: DEBUG nova.compute.manager [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Refreshing instance network info cache due to event network-changed-a193ab2f-5a9d-4411-94f9-cc5834b60795. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 915.317660] env[61629]: DEBUG oslo_concurrency.lockutils [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] Acquiring lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.361027] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354316, 'name': CreateVM_Task, 'duration_secs': 0.501175} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.361027] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 915.361027] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.361027] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.361027] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 915.361027] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11820abc-82ed-4fd0-82a5-c03c22b9bff3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.367188] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Waiting for the task: (returnval){ [ 915.367188] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]526131f5-f369-8dc8-4f76-3eefff74db59" [ 915.367188] env[61629]: _type = "Task" [ 915.367188] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.373884] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]526131f5-f369-8dc8-4f76-3eefff74db59, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.416975] env[61629]: DEBUG nova.scheduler.client.report [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 915.452824] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9c2bf4ad-7915-4ad0-962b-d77d3ec4ef87 tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Lock "3085a70f-360c-43a3-80d7-e7b87fb3e146" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.592s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.505213] env[61629]: DEBUG nova.compute.manager [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 915.533172] env[61629]: DEBUG nova.virt.hardware [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 915.533450] env[61629]: DEBUG nova.virt.hardware [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 915.533611] env[61629]: DEBUG nova.virt.hardware [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 915.533794] env[61629]: DEBUG nova.virt.hardware [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 
tempest-TenantUsagesTestJSON-1345049248-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 915.533948] env[61629]: DEBUG nova.virt.hardware [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 915.534918] env[61629]: DEBUG nova.virt.hardware [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 915.535205] env[61629]: DEBUG nova.virt.hardware [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 915.535408] env[61629]: DEBUG nova.virt.hardware [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 915.535585] env[61629]: DEBUG nova.virt.hardware [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 915.535754] env[61629]: DEBUG nova.virt.hardware [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 915.536418] env[61629]: DEBUG nova.virt.hardware [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 915.537306] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c54a1338-e574-457f-8300-6ef0993b9258 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.550516] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a17b05f-58c5-40ea-b712-ca5c0bd4fba7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.559412] env[61629]: DEBUG oslo_vmware.api [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354318, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.375028} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.568098] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 915.568098] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 915.568098] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 915.568098] env[61629]: INFO nova.compute.manager [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Took 2.32 seconds to destroy the instance on the hypervisor. [ 915.568098] env[61629]: DEBUG oslo.service.loopingcall [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 915.568866] env[61629]: DEBUG nova.compute.manager [-] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 915.568986] env[61629]: DEBUG nova.network.neutron [-] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 915.616538] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b37d420f-bece-4ae9-a5ef-76e49a962e48 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquiring lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.616538] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b37d420f-bece-4ae9-a5ef-76e49a962e48 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.616538] env[61629]: INFO nova.compute.manager [None req-b37d420f-bece-4ae9-a5ef-76e49a962e48 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Attaching volume 3fb90aea-e719-47bc-a306-d3502438a6d2 to /dev/sdc [ 915.755526] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b5fb783-5a56-403c-a8f1-ddd7d15c857a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.762687] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dffc834-0fb7-4927-9589-a01c5b4bca1c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.776743] env[61629]: DEBUG nova.virt.block_device [None req-b37d420f-bece-4ae9-a5ef-76e49a962e48 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Updating existing volume attachment record: 0913d0e1-e92a-4f49-83ca-bdfeba00ec38 {{(pid=61629) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 915.804752] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquiring lock "3085a70f-360c-43a3-80d7-e7b87fb3e146" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.805854] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Lock "3085a70f-360c-43a3-80d7-e7b87fb3e146" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61629) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.806131] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquiring lock "3085a70f-360c-43a3-80d7-e7b87fb3e146-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.806336] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Lock "3085a70f-360c-43a3-80d7-e7b87fb3e146-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.808123] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Lock "3085a70f-360c-43a3-80d7-e7b87fb3e146-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.810672] env[61629]: INFO nova.compute.manager [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Terminating instance [ 915.815514] env[61629]: DEBUG nova.compute.manager [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 915.815726] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 915.816593] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a26c589b-d237-442b-a6a5-3e689b2328e5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.824925] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 915.825714] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aaeb2aca-3090-445c-9cd3-d9331e04047b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.832340] env[61629]: DEBUG oslo_vmware.api [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for the task: (returnval){ [ 915.832340] env[61629]: value = "task-1354319" [ 915.832340] env[61629]: _type = "Task" [ 915.832340] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.842456] env[61629]: DEBUG oslo_vmware.api [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354319, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.875027] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]526131f5-f369-8dc8-4f76-3eefff74db59, 'name': SearchDatastore_Task, 'duration_secs': 0.010419} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.875585] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.876171] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 915.876336] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.876566] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.876888] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 915.877280] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-46772389-bd80-43aa-8f7c-3ef7a0787340 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.890026] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 915.890026] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 915.890026] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-617a06f1-b034-413b-9c59-2c930908bead {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.896677] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Waiting for the task: (returnval){ [ 915.896677] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52c34346-017b-ab94-1212-e42f94df8c3a" [ 915.896677] env[61629]: _type = "Task" [ 915.896677] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.910274] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52c34346-017b-ab94-1212-e42f94df8c3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.922355] env[61629]: DEBUG oslo_concurrency.lockutils [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.443s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.922902] env[61629]: DEBUG nova.compute.manager [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 915.925941] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.124s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.926224] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.929624] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.989s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.930485] env[61629]: DEBUG nova.objects.instance [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lazy-loading 'resources' on Instance uuid 1d451558-dbbc-4942-b739-5d4b88057a75 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 915.958343] env[61629]: INFO nova.scheduler.client.report [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Deleted allocations for instance 109ab664-3bb9-420e-a4a5-526277c60b96 [ 916.346443] env[61629]: DEBUG oslo_vmware.api [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354319, 'name': PowerOffVM_Task, 'duration_secs': 0.185511} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.350563] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 916.350719] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 916.352894] env[61629]: DEBUG nova.compute.manager [req-e553a993-b0f9-4cfc-9203-911e7cac99ba req-a34687ec-2e64-43f0-ae68-c5bea67a12f1 service nova] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Received event network-vif-plugged-dd6bf404-0e6a-4868-a42a-abc64b875fa8 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 916.353058] env[61629]: DEBUG oslo_concurrency.lockutils [req-e553a993-b0f9-4cfc-9203-911e7cac99ba req-a34687ec-2e64-43f0-ae68-c5bea67a12f1 service nova] Acquiring lock "cd165a78-21f9-4fc7-88e5-5ab35047eacc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.353617] env[61629]: DEBUG oslo_concurrency.lockutils [req-e553a993-b0f9-4cfc-9203-911e7cac99ba req-a34687ec-2e64-43f0-ae68-c5bea67a12f1 service nova] Lock "cd165a78-21f9-4fc7-88e5-5ab35047eacc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.353809] env[61629]: DEBUG oslo_concurrency.lockutils [req-e553a993-b0f9-4cfc-9203-911e7cac99ba req-a34687ec-2e64-43f0-ae68-c5bea67a12f1 service nova] Lock "cd165a78-21f9-4fc7-88e5-5ab35047eacc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.353986] env[61629]: DEBUG nova.compute.manager [req-e553a993-b0f9-4cfc-9203-911e7cac99ba req-a34687ec-2e64-43f0-ae68-c5bea67a12f1 service nova] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] No waiting events found dispatching network-vif-plugged-dd6bf404-0e6a-4868-a42a-abc64b875fa8 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 916.354176] env[61629]: WARNING nova.compute.manager [req-e553a993-b0f9-4cfc-9203-911e7cac99ba req-a34687ec-2e64-43f0-ae68-c5bea67a12f1 service nova] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Received unexpected event network-vif-plugged-dd6bf404-0e6a-4868-a42a-abc64b875fa8 for instance with vm_state building and task_state spawning. 
[ 916.354468] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5c76c432-5fb6-4a15-a5b2-e5e5f95437ad {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.401881] env[61629]: DEBUG nova.network.neutron [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Successfully updated port: dd6bf404-0e6a-4868-a42a-abc64b875fa8 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 916.413343] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52c34346-017b-ab94-1212-e42f94df8c3a, 'name': SearchDatastore_Task, 'duration_secs': 0.015485} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.415319] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbf848e4-3b21-4adb-9d05-007001fc37cd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.422391] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Waiting for the task: (returnval){ [ 916.422391] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52226268-0255-aa98-9475-3ec00b2e1d12" [ 916.422391] env[61629]: _type = "Task" [ 916.422391] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.434463] env[61629]: DEBUG nova.compute.utils [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 916.436526] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52226268-0255-aa98-9475-3ec00b2e1d12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.437209] env[61629]: DEBUG nova.compute.manager [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 916.437420] env[61629]: DEBUG nova.network.neutron [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 916.446298] env[61629]: DEBUG nova.network.neutron [req-1fcc7eaa-5bc3-406c-b1dc-4441744d31a4 req-eb2dd58b-dfc9-4f13-8c04-67b8f7b5282c service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Updated VIF entry in instance network info cache for port a193ab2f-5a9d-4411-94f9-cc5834b60795. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 916.449077] env[61629]: DEBUG nova.network.neutron [req-1fcc7eaa-5bc3-406c-b1dc-4441744d31a4 req-eb2dd58b-dfc9-4f13-8c04-67b8f7b5282c service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Updating instance_info_cache with network_info: [{"id": "a193ab2f-5a9d-4411-94f9-cc5834b60795", "address": "fa:16:3e:f8:02:ef", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa193ab2f-5a", "ovs_interfaceid": "a193ab2f-5a9d-4411-94f9-cc5834b60795", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.470025] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5773e932-4489-489b-80ed-e4d5d243024d tempest-ServerPasswordTestJSON-782114612 tempest-ServerPasswordTestJSON-782114612-project-member] Lock "109ab664-3bb9-420e-a4a5-526277c60b96" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.686s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.470025] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 916.470025] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 
916.470730] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Deleting the datastore file [datastore1] 3085a70f-360c-43a3-80d7-e7b87fb3e146 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 916.474083] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e696def-ca66-4a6e-b195-28a90fa70125 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.483161] env[61629]: DEBUG oslo_vmware.api [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for the task: (returnval){ [ 916.483161] env[61629]: value = "task-1354322" [ 916.483161] env[61629]: _type = "Task" [ 916.483161] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.494177] env[61629]: DEBUG oslo_vmware.api [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354322, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.541312] env[61629]: DEBUG nova.policy [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '23e2354567b747cab5a15764026e21f6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd318d29ec50427eb997c83837120c9c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 916.740030] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca093d6-65c2-4e85-bfce-55517bb5977a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.754060] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de04851-4dcf-46f7-967e-e81605012e28 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.790209] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce10f1f-69c0-416b-8cf0-214648b1eb0f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.798654] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a056c2d-0361-4b2a-88d9-4de09e41caa2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.806125] env[61629]: DEBUG nova.network.neutron [-] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.824370] env[61629]: 
DEBUG nova.compute.provider_tree [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 916.828418] env[61629]: INFO nova.compute.manager [-] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Took 1.26 seconds to deallocate network for instance. [ 916.904327] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Acquiring lock "refresh_cache-cd165a78-21f9-4fc7-88e5-5ab35047eacc" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.904655] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Acquired lock "refresh_cache-cd165a78-21f9-4fc7-88e5-5ab35047eacc" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.904768] env[61629]: DEBUG nova.network.neutron [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 916.936429] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52226268-0255-aa98-9475-3ec00b2e1d12, 'name': SearchDatastore_Task, 'duration_secs': 0.018019} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.936429] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 916.936429] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b/2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 916.936429] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f0816a2b-8c34-44a7-8b23-3eefbc7c0a3f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.937200] env[61629]: DEBUG nova.compute.manager [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 916.946650] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Waiting for the task: (returnval){ [ 916.946650] env[61629]: value = "task-1354323" [ 916.946650] env[61629]: _type = "Task" [ 916.946650] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.950468] env[61629]: DEBUG oslo_concurrency.lockutils [req-1fcc7eaa-5bc3-406c-b1dc-4441744d31a4 req-eb2dd58b-dfc9-4f13-8c04-67b8f7b5282c service nova] Releasing lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 916.950726] env[61629]: DEBUG nova.compute.manager [req-1fcc7eaa-5bc3-406c-b1dc-4441744d31a4 req-eb2dd58b-dfc9-4f13-8c04-67b8f7b5282c service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Received event network-changed-91aa1640-3097-4a26-9090-4081740f917d {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 916.950948] env[61629]: DEBUG nova.compute.manager [req-1fcc7eaa-5bc3-406c-b1dc-4441744d31a4 req-eb2dd58b-dfc9-4f13-8c04-67b8f7b5282c service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Refreshing instance network info cache due to event network-changed-91aa1640-3097-4a26-9090-4081740f917d. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 916.951206] env[61629]: DEBUG oslo_concurrency.lockutils [req-1fcc7eaa-5bc3-406c-b1dc-4441744d31a4 req-eb2dd58b-dfc9-4f13-8c04-67b8f7b5282c service nova] Acquiring lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.951361] env[61629]: DEBUG oslo_concurrency.lockutils [req-1fcc7eaa-5bc3-406c-b1dc-4441744d31a4 req-eb2dd58b-dfc9-4f13-8c04-67b8f7b5282c service nova] Acquired lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.951529] env[61629]: DEBUG nova.network.neutron [req-1fcc7eaa-5bc3-406c-b1dc-4441744d31a4 req-eb2dd58b-dfc9-4f13-8c04-67b8f7b5282c service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Refreshing network info cache for port 91aa1640-3097-4a26-9090-4081740f917d {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 916.952693] env[61629]: DEBUG oslo_concurrency.lockutils [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] Acquired lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.952881] env[61629]: DEBUG nova.network.neutron [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Refreshing network info cache for port a193ab2f-5a9d-4411-94f9-cc5834b60795 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 916.958862] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Task: {'id': task-1354323, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.991905] env[61629]: DEBUG oslo_vmware.api [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Task: {'id': task-1354322, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161671} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.992272] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 916.992485] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 916.992689] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 916.992886] env[61629]: INFO nova.compute.manager [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Took 1.18 seconds to destroy the instance on the hypervisor. [ 916.993200] env[61629]: DEBUG oslo.service.loopingcall [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 916.996604] env[61629]: DEBUG nova.compute.manager [-] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 916.996604] env[61629]: DEBUG nova.network.neutron [-] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 917.156461] env[61629]: DEBUG nova.network.neutron [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Successfully created port: 57805f12-9b81-4485-8f3a-32567ed40a8c {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 917.249989] env[61629]: DEBUG nova.compute.manager [req-5f904279-f5dc-4282-b2e9-59213e89b048 req-3dd89ed4-4842-482e-a6a6-9901bc81dc1f service nova] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Received event network-vif-deleted-ce03096a-81c3-496e-96ec-bb52e2ed7d48 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 917.332721] env[61629]: DEBUG nova.scheduler.client.report [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 917.394428] env[61629]: INFO nova.compute.manager [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Took 0.57 seconds to detach 1 volumes for instance. [ 917.452063] env[61629]: DEBUG nova.network.neutron [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 917.464839] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Task: {'id': task-1354323, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.741954] env[61629]: DEBUG nova.network.neutron [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Updating instance_info_cache with network_info: [{"id": "dd6bf404-0e6a-4868-a42a-abc64b875fa8", "address": "fa:16:3e:3a:6e:b3", "network": {"id": "80098c9c-3683-4298-9ac9-4cf114589ae1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.69", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9a36b70b3bef49e68cbe43ec3eaa5dc8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd6bf404-0e", "ovs_interfaceid": "dd6bf404-0e6a-4868-a42a-abc64b875fa8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.838365] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.909s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.841324] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 
6.639s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.841568] env[61629]: DEBUG nova.objects.instance [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lazy-loading 'resources' on Instance uuid fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 917.843251] env[61629]: DEBUG nova.network.neutron [req-1fcc7eaa-5bc3-406c-b1dc-4441744d31a4 req-eb2dd58b-dfc9-4f13-8c04-67b8f7b5282c service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Updated VIF entry in instance network info cache for port 91aa1640-3097-4a26-9090-4081740f917d. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 917.843575] env[61629]: DEBUG nova.network.neutron [req-1fcc7eaa-5bc3-406c-b1dc-4441744d31a4 req-eb2dd58b-dfc9-4f13-8c04-67b8f7b5282c service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Updating instance_info_cache with network_info: [{"id": "91aa1640-3097-4a26-9090-4081740f917d", "address": "fa:16:3e:d4:a8:15", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91aa1640-30", "ovs_interfaceid": "91aa1640-3097-4a26-9090-4081740f917d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.869883] env[61629]: INFO nova.scheduler.client.report [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Deleted allocations for instance 1d451558-dbbc-4942-b739-5d4b88057a75 [ 917.906986] env[61629]: DEBUG oslo_concurrency.lockutils [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.949184] env[61629]: DEBUG nova.compute.manager [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 917.972410] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Task: {'id': task-1354323, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.58057} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.972709] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b/2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 917.972927] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 917.973218] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7a73d948-b0e9-4104-a31e-da38cc0fb677 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.981662] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Waiting for the task: (returnval){ [ 917.981662] env[61629]: value = "task-1354324" [ 917.981662] env[61629]: _type = "Task" [ 917.981662] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.992598] env[61629]: DEBUG nova.virt.hardware [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 917.992858] env[61629]: DEBUG nova.virt.hardware [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 917.993066] env[61629]: DEBUG nova.virt.hardware [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 917.993332] env[61629]: DEBUG nova.virt.hardware [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 917.993654] env[61629]: DEBUG nova.virt.hardware [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 917.993868] env[61629]: DEBUG nova.virt.hardware [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 917.994168] env[61629]: DEBUG nova.virt.hardware [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 917.994370] env[61629]: DEBUG nova.virt.hardware [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 917.994565] env[61629]: DEBUG nova.virt.hardware 
[None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 917.994803] env[61629]: DEBUG nova.virt.hardware [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 917.994996] env[61629]: DEBUG nova.virt.hardware [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 917.995832] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b8f97c-9e74-4487-a8cf-08a7702d6233 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.001308] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Task: {'id': task-1354324, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.006219] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2498b85c-3037-400b-91de-31d7c656537e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.093963] env[61629]: DEBUG nova.network.neutron [-] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.247547] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Releasing lock "refresh_cache-cd165a78-21f9-4fc7-88e5-5ab35047eacc" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.248604] env[61629]: DEBUG nova.compute.manager [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Instance network_info: |[{"id": "dd6bf404-0e6a-4868-a42a-abc64b875fa8", "address": "fa:16:3e:3a:6e:b3", "network": {"id": "80098c9c-3683-4298-9ac9-4cf114589ae1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.69", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9a36b70b3bef49e68cbe43ec3eaa5dc8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd6bf404-0e", "ovs_interfaceid": "dd6bf404-0e6a-4868-a42a-abc64b875fa8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 918.249540] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:6e:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '193994c7-8e1b-4f25-a4a4-d0563845eb28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dd6bf404-0e6a-4868-a42a-abc64b875fa8', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 918.273466] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Creating folder: Project (5317f3f744334c279f4cc5c3281aa3a1). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 918.276490] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f4d66f9f-c970-4f16-bda9-a6a031ede25e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.290050] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Created folder: Project (5317f3f744334c279f4cc5c3281aa3a1) in parent group-v288443. [ 918.290357] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Creating folder: Instances. Parent ref: group-v288540. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 918.290873] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-31d8e01a-e8ec-48a7-92a2-0483c2584eae {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.300680] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Created folder: Instances in parent group-v288540. [ 918.301052] env[61629]: DEBUG oslo.service.loopingcall [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 918.301337] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 918.301629] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c51df7f2-6d0b-4c56-ba7b-d82494c8439c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.336649] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 918.336649] env[61629]: value = "task-1354328" [ 918.336649] env[61629]: _type = "Task" [ 918.336649] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.347058] env[61629]: DEBUG nova.objects.instance [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lazy-loading 'numa_topology' on Instance uuid fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 918.348130] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354328, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.354112] env[61629]: DEBUG oslo_concurrency.lockutils [req-1fcc7eaa-5bc3-406c-b1dc-4441744d31a4 req-eb2dd58b-dfc9-4f13-8c04-67b8f7b5282c service nova] Releasing lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.367428] env[61629]: DEBUG nova.network.neutron [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Updated VIF entry in instance network info cache for port a193ab2f-5a9d-4411-94f9-cc5834b60795. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 918.367894] env[61629]: DEBUG nova.network.neutron [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Updating instance_info_cache with network_info: [{"id": "a193ab2f-5a9d-4411-94f9-cc5834b60795", "address": "fa:16:3e:f8:02:ef", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa193ab2f-5a", "ovs_interfaceid": "a193ab2f-5a9d-4411-94f9-cc5834b60795", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.386360] env[61629]: DEBUG nova.compute.manager [req-ff12a637-777a-4d93-8dc1-3508d6ccff18 req-f4a2d3a5-a88a-4b78-8a32-9ffe1f43a635 service nova] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Received event network-changed-dd6bf404-0e6a-4868-a42a-abc64b875fa8 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 918.386360] env[61629]: DEBUG nova.compute.manager [req-ff12a637-777a-4d93-8dc1-3508d6ccff18 req-f4a2d3a5-a88a-4b78-8a32-9ffe1f43a635 service nova] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Refreshing instance network info cache due to event network-changed-dd6bf404-0e6a-4868-a42a-abc64b875fa8. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 918.386629] env[61629]: DEBUG oslo_concurrency.lockutils [req-ff12a637-777a-4d93-8dc1-3508d6ccff18 req-f4a2d3a5-a88a-4b78-8a32-9ffe1f43a635 service nova] Acquiring lock "refresh_cache-cd165a78-21f9-4fc7-88e5-5ab35047eacc" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.386681] env[61629]: DEBUG oslo_concurrency.lockutils [req-ff12a637-777a-4d93-8dc1-3508d6ccff18 req-f4a2d3a5-a88a-4b78-8a32-9ffe1f43a635 service nova] Acquired lock "refresh_cache-cd165a78-21f9-4fc7-88e5-5ab35047eacc" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.386831] env[61629]: DEBUG nova.network.neutron [req-ff12a637-777a-4d93-8dc1-3508d6ccff18 req-f4a2d3a5-a88a-4b78-8a32-9ffe1f43a635 service nova] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Refreshing network info cache for port dd6bf404-0e6a-4868-a42a-abc64b875fa8 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 918.388593] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4a370fe9-044f-4f36-b4f7-94818693afa1 tempest-ImagesTestJSON-873757406 tempest-ImagesTestJSON-873757406-project-member] Lock "1d451558-dbbc-4942-b739-5d4b88057a75" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.039s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.497945] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Task: {'id': task-1354324, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096214} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.498277] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 918.499139] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b7b2a0-8c78-497d-9316-cfdaac5148fc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.522721] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b/2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 918.523041] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cdf80127-f6a6-4b4e-958d-c4a29233e7f7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.543677] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Waiting for the task: (returnval){ [ 918.543677] env[61629]: value = "task-1354329" [ 918.543677] env[61629]: _type = "Task" [ 918.543677] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.552349] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Task: {'id': task-1354329, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.597067] env[61629]: INFO nova.compute.manager [-] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Took 1.60 seconds to deallocate network for instance. [ 918.849705] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354328, 'name': CreateVM_Task, 'duration_secs': 0.337789} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.849705] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 918.849705] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.849705] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.849705] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 918.849705] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e633d544-835e-4af3-be43-825dea5e8a2e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.853756] env[61629]: DEBUG nova.objects.base [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=61629) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 918.859161] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Waiting for the task: (returnval){ [ 918.859161] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5251faba-6833-7426-86eb-68b9a3d48092" [ 918.859161] env[61629]: _type = "Task" [ 918.859161] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.872425] env[61629]: DEBUG oslo_concurrency.lockutils [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] Releasing lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.873044] env[61629]: DEBUG nova.compute.manager [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Received event network-vif-plugged-787f3ff1-d4f3-429f-8ee5-a5785d993cfc {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 918.873441] env[61629]: DEBUG oslo_concurrency.lockutils [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] Acquiring lock "2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.873823] env[61629]: DEBUG oslo_concurrency.lockutils [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] Lock "2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.874367] env[61629]: DEBUG oslo_concurrency.lockutils [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] Lock "2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.874709] env[61629]: DEBUG nova.compute.manager [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] No waiting events found dispatching network-vif-plugged-787f3ff1-d4f3-429f-8ee5-a5785d993cfc {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 918.878020] env[61629]: WARNING nova.compute.manager [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Received unexpected event network-vif-plugged-787f3ff1-d4f3-429f-8ee5-a5785d993cfc for instance with vm_state building and task_state spawning. [ 918.878020] env[61629]: DEBUG nova.compute.manager [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Received event network-changed-787f3ff1-d4f3-429f-8ee5-a5785d993cfc {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 918.878020] env[61629]: DEBUG nova.compute.manager [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Refreshing instance network info cache due to event network-changed-787f3ff1-d4f3-429f-8ee5-a5785d993cfc. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 918.878020] env[61629]: DEBUG oslo_concurrency.lockutils [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] Acquiring lock "refresh_cache-2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.878020] env[61629]: DEBUG oslo_concurrency.lockutils [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] Acquired lock "refresh_cache-2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.878020] env[61629]: DEBUG nova.network.neutron [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Refreshing network info cache for port 787f3ff1-d4f3-429f-8ee5-a5785d993cfc {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 918.878020] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5251faba-6833-7426-86eb-68b9a3d48092, 'name': SearchDatastore_Task, 'duration_secs': 0.010805} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.878020] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.878020] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 918.878020] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.879203] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.880073] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 918.880712] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be10d500-5c04-454a-b313-8e70f4c3f916 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.894509] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 918.894787] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 918.895612] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1444931-fca5-4c95-95bc-31f4a298c8fc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.909022] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Waiting for the task: (returnval){ [ 918.909022] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52c3022a-f8c3-2996-5c39-293ef92cabd5" [ 918.909022] env[61629]: _type = "Task" [ 918.909022] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.915653] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52c3022a-f8c3-2996-5c39-293ef92cabd5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.057735] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Task: {'id': task-1354329, 'name': ReconfigVM_Task, 'duration_secs': 0.366638} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.060470] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Reconfigured VM instance instance-00000055 to attach disk [datastore2] 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b/2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 919.061356] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2a7dd65-6462-41d3-9157-1c2d8f6ec1d6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.072922] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Waiting for the task: (returnval){ [ 919.072922] env[61629]: value = "task-1354330" [ 919.072922] env[61629]: _type = "Task" [ 919.072922] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.083018] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Task: {'id': task-1354330, 'name': Rename_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.106553] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.207825] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da643ad-a56a-4043-b853-b62deb40e384 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.213535] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9c2927d-bfba-47f5-a37b-f2822167efed {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.249846] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93844478-16a7-4781-882e-b9fe4929e65a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.258747] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe65070-0ae5-478a-a5fc-0a39ab1779c4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.271294] env[61629]: DEBUG nova.compute.provider_tree [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Inventory has not changed in ProviderTree for provider: 
d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 919.340158] env[61629]: DEBUG nova.network.neutron [req-ff12a637-777a-4d93-8dc1-3508d6ccff18 req-f4a2d3a5-a88a-4b78-8a32-9ffe1f43a635 service nova] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Updated VIF entry in instance network info cache for port dd6bf404-0e6a-4868-a42a-abc64b875fa8. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 919.340533] env[61629]: DEBUG nova.network.neutron [req-ff12a637-777a-4d93-8dc1-3508d6ccff18 req-f4a2d3a5-a88a-4b78-8a32-9ffe1f43a635 service nova] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Updating instance_info_cache with network_info: [{"id": "dd6bf404-0e6a-4868-a42a-abc64b875fa8", "address": "fa:16:3e:3a:6e:b3", "network": {"id": "80098c9c-3683-4298-9ac9-4cf114589ae1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.69", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9a36b70b3bef49e68cbe43ec3eaa5dc8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "193994c7-8e1b-4f25-a4a4-d0563845eb28", "external-id": "nsx-vlan-transportzone-607", "segmentation_id": 607, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd6bf404-0e", "ovs_interfaceid": "dd6bf404-0e6a-4868-a42a-abc64b875fa8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.385124] env[61629]: DEBUG nova.compute.manager [req-522a40f5-1e59-4c40-b758-0d6e6b354d9b req-d2ca8067-7f9d-4dd2-b8b7-3a2c39b6e059 service nova] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Received event network-vif-deleted-6df3f8f5-6430-44cb-ac3e-34209467a856 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 919.385335] env[61629]: DEBUG nova.compute.manager [req-522a40f5-1e59-4c40-b758-0d6e6b354d9b req-d2ca8067-7f9d-4dd2-b8b7-3a2c39b6e059 service nova] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Received event network-vif-deleted-f88f7616-a027-435a-b8a9-2a3cfffadd38 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 919.419935] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52c3022a-f8c3-2996-5c39-293ef92cabd5, 'name': SearchDatastore_Task, 'duration_secs': 0.00865} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.420759] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92999c1d-9bd9-4b54-8bdf-bf9c10b5ce80 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.428462] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Waiting for the task: (returnval){ [ 919.428462] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52eca0a1-e6de-2b78-01c5-71997a14002d" [ 919.428462] env[61629]: _type = "Task" [ 919.428462] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.436357] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52eca0a1-e6de-2b78-01c5-71997a14002d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.586089] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Task: {'id': task-1354330, 'name': Rename_Task, 'duration_secs': 0.145276} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.586621] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 919.587072] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-04f9bedf-632d-44c4-9ae4-fbaf6fa1a7c0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.596187] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Waiting for the task: (returnval){ [ 919.596187] env[61629]: value = "task-1354331" [ 919.596187] env[61629]: _type = "Task" [ 919.596187] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.602825] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Task: {'id': task-1354331, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.775402] env[61629]: DEBUG nova.scheduler.client.report [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 919.843605] env[61629]: DEBUG oslo_concurrency.lockutils [req-ff12a637-777a-4d93-8dc1-3508d6ccff18 req-f4a2d3a5-a88a-4b78-8a32-9ffe1f43a635 service nova] Releasing lock "refresh_cache-cd165a78-21f9-4fc7-88e5-5ab35047eacc" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.856430] env[61629]: DEBUG nova.network.neutron [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Updated VIF entry in instance network info cache for port 787f3ff1-d4f3-429f-8ee5-a5785d993cfc. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 919.856834] env[61629]: DEBUG nova.network.neutron [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Updating instance_info_cache with network_info: [{"id": "787f3ff1-d4f3-429f-8ee5-a5785d993cfc", "address": "fa:16:3e:ac:23:ad", "network": {"id": "2b98da83-9005-449d-ae25-3bad9cd73b35", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-987359251-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a2371a7e8504b14bd3788e9d9bd92a2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap787f3ff1-d4", "ovs_interfaceid": "787f3ff1-d4f3-429f-8ee5-a5785d993cfc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.927534] env[61629]: DEBUG nova.network.neutron [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Successfully updated port: 57805f12-9b81-4485-8f3a-32567ed40a8c {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 919.944500] env[61629]: DEBUG oslo_vmware.api [None 
req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52eca0a1-e6de-2b78-01c5-71997a14002d, 'name': SearchDatastore_Task, 'duration_secs': 0.009122} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.944845] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.945127] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] cd165a78-21f9-4fc7-88e5-5ab35047eacc/cd165a78-21f9-4fc7-88e5-5ab35047eacc.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 919.945671] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fcd5af92-1273-401a-b664-a6e222724703 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.953374] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Waiting for the task: (returnval){ [ 919.953374] env[61629]: value = "task-1354332" [ 919.953374] env[61629]: _type = "Task" [ 919.953374] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.962028] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Task: {'id': task-1354332, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.109366] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Task: {'id': task-1354331, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.285834] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.443s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.287484] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 8.949s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.338609] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-b37d420f-bece-4ae9-a5ef-76e49a962e48 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Volume attach. Driver type: vmdk {{(pid=61629) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 920.339232] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-b37d420f-bece-4ae9-a5ef-76e49a962e48 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-288539', 'volume_id': '3fb90aea-e719-47bc-a306-d3502438a6d2', 'name': 'volume-3fb90aea-e719-47bc-a306-d3502438a6d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2b01eeae-64be-44b3-b4cf-c2a8490043e3', 'attached_at': '', 'detached_at': '', 'volume_id': '3fb90aea-e719-47bc-a306-d3502438a6d2', 'serial': '3fb90aea-e719-47bc-a306-d3502438a6d2'} {{(pid=61629) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 920.340202] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa737fe-72de-494d-81a5-9e59633d5a1a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.362020] env[61629]: DEBUG oslo_concurrency.lockutils [req-8d108fa7-8c67-4d15-bb98-7c0761c1fe4b req-a424f4c3-f29b-40bd-a5e4-729d25b5a74a service nova] Releasing lock "refresh_cache-2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 920.363029] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45128274-cca1-4f21-b4e6-9336a170889a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.401967] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-b37d420f-bece-4ae9-a5ef-76e49a962e48 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] volume-3fb90aea-e719-47bc-a306-d3502438a6d2/volume-3fb90aea-e719-47bc-a306-d3502438a6d2.vmdk or device None with type thin {{(pid=61629) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 920.402329] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1808fc0e-d30d-490f-9190-afc930f336c4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.427518] env[61629]: DEBUG oslo_vmware.api [None req-b37d420f-bece-4ae9-a5ef-76e49a962e48 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Waiting for the task: (returnval){ [ 920.427518] env[61629]: value = "task-1354333" [ 920.427518] env[61629]: _type = "Task" [ 920.427518] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.431130] env[61629]: DEBUG oslo_concurrency.lockutils [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "refresh_cache-c5b6f6b8-587c-4b74-bc83-98dac319b15b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 920.431897] env[61629]: DEBUG oslo_concurrency.lockutils [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquired lock "refresh_cache-c5b6f6b8-587c-4b74-bc83-98dac319b15b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.431897] env[61629]: DEBUG nova.network.neutron [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 920.441338] env[61629]: DEBUG oslo_vmware.api [None req-b37d420f-bece-4ae9-a5ef-76e49a962e48 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354333, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.465077] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Task: {'id': task-1354332, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.606128] env[61629]: DEBUG oslo_vmware.api [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Task: {'id': task-1354331, 'name': PowerOnVM_Task, 'duration_secs': 0.769974} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.606375] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 920.606593] env[61629]: INFO nova.compute.manager [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Took 8.69 seconds to spawn the instance on the hypervisor. [ 920.606777] env[61629]: DEBUG nova.compute.manager [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 920.607791] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072c73c1-c1dd-4813-9ab8-a771aad73406 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.642147] env[61629]: DEBUG nova.compute.manager [req-5b4f5238-c223-455f-8b2f-4b6b1da0563f req-55f08230-cfaf-4fcc-97f3-e23488837c72 service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Received event network-vif-plugged-57805f12-9b81-4485-8f3a-32567ed40a8c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 920.642147] env[61629]: DEBUG oslo_concurrency.lockutils [req-5b4f5238-c223-455f-8b2f-4b6b1da0563f req-55f08230-cfaf-4fcc-97f3-e23488837c72 service nova] Acquiring lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.642560] env[61629]: DEBUG oslo_concurrency.lockutils [req-5b4f5238-c223-455f-8b2f-4b6b1da0563f req-55f08230-cfaf-4fcc-97f3-e23488837c72 service nova] Lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.642560] env[61629]: DEBUG oslo_concurrency.lockutils [req-5b4f5238-c223-455f-8b2f-4b6b1da0563f req-55f08230-cfaf-4fcc-97f3-e23488837c72 service nova] Lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.642671] env[61629]: DEBUG nova.compute.manager [req-5b4f5238-c223-455f-8b2f-4b6b1da0563f req-55f08230-cfaf-4fcc-97f3-e23488837c72 service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] No waiting events found dispatching network-vif-plugged-57805f12-9b81-4485-8f3a-32567ed40a8c {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 920.642888] env[61629]: WARNING nova.compute.manager [req-5b4f5238-c223-455f-8b2f-4b6b1da0563f req-55f08230-cfaf-4fcc-97f3-e23488837c72 service nova] [instance: 
c5b6f6b8-587c-4b74-bc83-98dac319b15b] Received unexpected event network-vif-plugged-57805f12-9b81-4485-8f3a-32567ed40a8c for instance with vm_state building and task_state spawning. [ 920.643232] env[61629]: DEBUG nova.compute.manager [req-5b4f5238-c223-455f-8b2f-4b6b1da0563f req-55f08230-cfaf-4fcc-97f3-e23488837c72 service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Received event network-changed-57805f12-9b81-4485-8f3a-32567ed40a8c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 920.643434] env[61629]: DEBUG nova.compute.manager [req-5b4f5238-c223-455f-8b2f-4b6b1da0563f req-55f08230-cfaf-4fcc-97f3-e23488837c72 service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Refreshing instance network info cache due to event network-changed-57805f12-9b81-4485-8f3a-32567ed40a8c. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 920.644431] env[61629]: DEBUG oslo_concurrency.lockutils [req-5b4f5238-c223-455f-8b2f-4b6b1da0563f req-55f08230-cfaf-4fcc-97f3-e23488837c72 service nova] Acquiring lock "refresh_cache-c5b6f6b8-587c-4b74-bc83-98dac319b15b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 920.796054] env[61629]: INFO nova.compute.claims [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 920.803651] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b3594c4a-9e1a-49b9-b2d9-a0a5923e5488 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 31.205s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.806775] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 5.674s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.806775] env[61629]: INFO nova.compute.manager [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Unshelving [ 920.937792] env[61629]: DEBUG oslo_vmware.api [None req-b37d420f-bece-4ae9-a5ef-76e49a962e48 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354333, 'name': ReconfigVM_Task, 'duration_secs': 0.410434} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.938207] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-b37d420f-bece-4ae9-a5ef-76e49a962e48 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Reconfigured VM instance instance-00000043 to attach disk [datastore2] volume-3fb90aea-e719-47bc-a306-d3502438a6d2/volume-3fb90aea-e719-47bc-a306-d3502438a6d2.vmdk or device None with type thin {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 920.950956] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80ffccf1-c4c7-4bfd-9585-bd0cdb2c8e52 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.973720] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Task: {'id': task-1354332, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531229} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.973720] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] cd165a78-21f9-4fc7-88e5-5ab35047eacc/cd165a78-21f9-4fc7-88e5-5ab35047eacc.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 920.974072] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 920.974424] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ab9397a-3110-4197-86ce-c0a51ae2578b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.977843] env[61629]: DEBUG oslo_vmware.api [None req-b37d420f-bece-4ae9-a5ef-76e49a962e48 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Waiting for the task: (returnval){ [ 920.977843] env[61629]: value = "task-1354334" [ 920.977843] env[61629]: _type = "Task" [ 920.977843] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.983788] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Waiting for the task: (returnval){ [ 920.983788] env[61629]: value = "task-1354335" [ 920.983788] env[61629]: _type = "Task" [ 920.983788] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.992848] env[61629]: DEBUG oslo_vmware.api [None req-b37d420f-bece-4ae9-a5ef-76e49a962e48 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354334, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.998811] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Task: {'id': task-1354335, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.017339] env[61629]: DEBUG nova.network.neutron [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 921.131642] env[61629]: INFO nova.compute.manager [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Took 34.27 seconds to build instance. [ 921.176922] env[61629]: DEBUG nova.network.neutron [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Updating instance_info_cache with network_info: [{"id": "57805f12-9b81-4485-8f3a-32567ed40a8c", "address": "fa:16:3e:ca:29:87", "network": {"id": "249c4ba3-38e0-421a-91b6-cf97f90eb535", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1700423127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd318d29ec50427eb997c83837120c9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57805f12-9b", "ovs_interfaceid": "57805f12-9b81-4485-8f3a-32567ed40a8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.311498] env[61629]: INFO nova.compute.resource_tracker [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Updating resource usage from migration 32006d8f-5a62-4120-b8f6-68dd596d1066 [ 921.408269] env[61629]: DEBUG oslo_concurrency.lockutils [None req-32b22676-9742-4636-9409-98c3ba5a0d45 
tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "interface-09890839-b1d9-4558-992d-b1a6f4c5f750-d666a690-afc1-4ce0-a878-192338b5dc0d" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.408525] env[61629]: DEBUG oslo_concurrency.lockutils [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "interface-09890839-b1d9-4558-992d-b1a6f4c5f750-d666a690-afc1-4ce0-a878-192338b5dc0d" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.408889] env[61629]: DEBUG nova.objects.instance [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lazy-loading 'flavor' on Instance uuid 09890839-b1d9-4558-992d-b1a6f4c5f750 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 921.491480] env[61629]: DEBUG oslo_vmware.api [None req-b37d420f-bece-4ae9-a5ef-76e49a962e48 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354334, 'name': ReconfigVM_Task, 'duration_secs': 0.177472} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.492168] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-b37d420f-bece-4ae9-a5ef-76e49a962e48 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-288539', 'volume_id': '3fb90aea-e719-47bc-a306-d3502438a6d2', 'name': 'volume-3fb90aea-e719-47bc-a306-d3502438a6d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2b01eeae-64be-44b3-b4cf-c2a8490043e3', 'attached_at': '', 'detached_at': '', 'volume_id': '3fb90aea-e719-47bc-a306-d3502438a6d2', 'serial': '3fb90aea-e719-47bc-a306-d3502438a6d2'} {{(pid=61629) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 921.498366] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Task: {'id': task-1354335, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081842} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.500750] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 921.502273] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2cfbb7-fcea-4c65-9401-5bd63569854e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.525282] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] cd165a78-21f9-4fc7-88e5-5ab35047eacc/cd165a78-21f9-4fc7-88e5-5ab35047eacc.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 921.528061] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bcddd590-ef3f-4bd2-a991-334f33c1b7ac {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.547942] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Waiting for the task: (returnval){ [ 921.547942] env[61629]: value = "task-1354336" [ 921.547942] env[61629]: _type = "Task" [ 921.547942] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.554805] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Task: {'id': task-1354336, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.593102] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b12e70eb-c219-4682-ac28-49ce1893b848 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.600341] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9e66c0-265e-4625-9624-b74f78d662d2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.630848] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e019659-21df-41fa-a8ab-5be5718cede7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.633679] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7ad109b1-5edb-4c19-b911-c467f0ca8a9c tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Lock "2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.785s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.639106] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af5e49b-1a8d-4d50-bce9-76920103eef8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.653191] env[61629]: DEBUG nova.compute.provider_tree [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 921.679771] env[61629]: DEBUG oslo_concurrency.lockutils [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Releasing lock "refresh_cache-c5b6f6b8-587c-4b74-bc83-98dac319b15b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 921.680211] env[61629]: DEBUG nova.compute.manager [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Instance network_info: |[{"id": "57805f12-9b81-4485-8f3a-32567ed40a8c", "address": "fa:16:3e:ca:29:87", "network": {"id": "249c4ba3-38e0-421a-91b6-cf97f90eb535", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1700423127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": 
[], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd318d29ec50427eb997c83837120c9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57805f12-9b", "ovs_interfaceid": "57805f12-9b81-4485-8f3a-32567ed40a8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 921.680841] env[61629]: DEBUG oslo_concurrency.lockutils [req-5b4f5238-c223-455f-8b2f-4b6b1da0563f req-55f08230-cfaf-4fcc-97f3-e23488837c72 service nova] Acquired lock "refresh_cache-c5b6f6b8-587c-4b74-bc83-98dac319b15b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.681092] env[61629]: DEBUG nova.network.neutron [req-5b4f5238-c223-455f-8b2f-4b6b1da0563f req-55f08230-cfaf-4fcc-97f3-e23488837c72 service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Refreshing network info cache for port 57805f12-9b81-4485-8f3a-32567ed40a8c {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 921.682289] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:29:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '57805f12-9b81-4485-8f3a-32567ed40a8c', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 921.690021] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Creating folder: Project (bd318d29ec50427eb997c83837120c9c). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 921.691011] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-86157e67-e634-4c68-8a8d-e9810b67a6ca {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.701567] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Created folder: Project (bd318d29ec50427eb997c83837120c9c) in parent group-v288443. [ 921.701759] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Creating folder: Instances. Parent ref: group-v288543. 
{{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 921.702028] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a5a9703b-47e0-4a11-b8bc-360484e490a2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.711848] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Created folder: Instances in parent group-v288543. [ 921.712123] env[61629]: DEBUG oslo.service.loopingcall [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 921.712328] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 921.712532] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-936b1d37-fc83-4160-995a-424ea7f68a9a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.732121] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 921.732121] env[61629]: value = "task-1354339" [ 921.732121] env[61629]: _type = "Task" [ 921.732121] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.742699] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354339, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.830379] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.058281] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Task: {'id': task-1354336, 'name': ReconfigVM_Task, 'duration_secs': 0.357364} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.058281] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Reconfigured VM instance instance-00000056 to attach disk [datastore2] cd165a78-21f9-4fc7-88e5-5ab35047eacc/cd165a78-21f9-4fc7-88e5-5ab35047eacc.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 922.058684] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e673cfcb-5681-4429-8c23-1822749c5e01 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.064908] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Waiting for the task: (returnval){ [ 922.064908] env[61629]: value = "task-1354340" [ 922.064908] env[61629]: _type = "Task" [ 922.064908] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.075809] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Task: {'id': task-1354340, 'name': Rename_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.081374] env[61629]: DEBUG nova.objects.instance [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lazy-loading 'pci_requests' on Instance uuid 09890839-b1d9-4558-992d-b1a6f4c5f750 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 922.176385] env[61629]: ERROR nova.scheduler.client.report [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [req-a985f1bd-28f5-401f-becc-501d0972ebb4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID d075eff1-6f77-44a8-824e-16f3e03b4063. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a985f1bd-28f5-401f-becc-501d0972ebb4"}]} [ 922.195202] env[61629]: DEBUG nova.scheduler.client.report [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Refreshing inventories for resource provider d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 922.210374] env[61629]: DEBUG nova.scheduler.client.report [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Updating ProviderTree inventory for provider d075eff1-6f77-44a8-824e-16f3e03b4063 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 922.210612] env[61629]: DEBUG nova.compute.provider_tree [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 922.224477] env[61629]: DEBUG nova.scheduler.client.report [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Refreshing aggregate associations for resource provider d075eff1-6f77-44a8-824e-16f3e03b4063, aggregates: None {{(pid=61629) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 922.243545] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354339, 'name': CreateVM_Task, 'duration_secs': 0.371963} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.244450] env[61629]: DEBUG nova.scheduler.client.report [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Refreshing trait associations for resource provider d075eff1-6f77-44a8-824e-16f3e03b4063, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61629) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 922.246364] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 922.247637] env[61629]: DEBUG oslo_concurrency.lockutils [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.247637] env[61629]: DEBUG oslo_concurrency.lockutils [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.247736] env[61629]: DEBUG oslo_concurrency.lockutils [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 922.247986] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6828604-c544-4684-a5be-8c39bb5a9eaa {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.252805] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 922.252805] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5263a284-5534-5ec2-a6ad-13074079c8cd" [ 922.252805] env[61629]: _type = "Task" [ 922.252805] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.260792] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5263a284-5534-5ec2-a6ad-13074079c8cd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.493136] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367543d3-ba6e-48d3-9b65-bda50a20cb55 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.501217] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e6e9d8-855e-4829-aeb2-2e9650991704 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.533336] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b0c1b5-15df-480b-a196-96e6bc1ec57a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.540702] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a819e7-23bb-4b21-9818-86b5b78cf61c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.555578] env[61629]: DEBUG nova.objects.instance [None req-b37d420f-bece-4ae9-a5ef-76e49a962e48 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lazy-loading 'flavor' on Instance uuid 2b01eeae-64be-44b3-b4cf-c2a8490043e3 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 922.557114] env[61629]: DEBUG nova.compute.provider_tree [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 922.574635] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Task: {'id': task-1354340, 'name': Rename_Task, 'duration_secs': 0.164104} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.574805] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 922.575115] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-311325ef-014e-4047-ab97-a2e8b57a8b61 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.581470] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Waiting for the task: (returnval){ [ 922.581470] env[61629]: value = "task-1354341" [ 922.581470] env[61629]: _type = "Task" [ 922.581470] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.584694] env[61629]: DEBUG nova.objects.base [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Object Instance<09890839-b1d9-4558-992d-b1a6f4c5f750> lazy-loaded attributes: flavor,pci_requests {{(pid=61629) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 922.584924] env[61629]: DEBUG nova.network.neutron [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 922.591538] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Task: {'id': task-1354341, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.765417] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5263a284-5534-5ec2-a6ad-13074079c8cd, 'name': SearchDatastore_Task, 'duration_secs': 0.008854} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.765798] env[61629]: DEBUG oslo_concurrency.lockutils [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.765973] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 922.766237] env[61629]: DEBUG oslo_concurrency.lockutils [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.766388] env[61629]: DEBUG oslo_concurrency.lockutils [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.767707] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 922.767707] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a9a8655-a738-4020-948a-b081a4b9d88c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.774716] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 922.774899] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 922.775631] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4750da3b-64f8-42ce-b3c4-23080013b728 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.781061] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 922.781061] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5236180e-e9dd-518e-bde9-516a85441a51" [ 922.781061] env[61629]: _type = "Task" [ 922.781061] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.790085] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5236180e-e9dd-518e-bde9-516a85441a51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.817343] env[61629]: DEBUG nova.network.neutron [req-5b4f5238-c223-455f-8b2f-4b6b1da0563f req-55f08230-cfaf-4fcc-97f3-e23488837c72 service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Updated VIF entry in instance network info cache for port 57805f12-9b81-4485-8f3a-32567ed40a8c. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 922.817343] env[61629]: DEBUG nova.network.neutron [req-5b4f5238-c223-455f-8b2f-4b6b1da0563f req-55f08230-cfaf-4fcc-97f3-e23488837c72 service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Updating instance_info_cache with network_info: [{"id": "57805f12-9b81-4485-8f3a-32567ed40a8c", "address": "fa:16:3e:ca:29:87", "network": {"id": "249c4ba3-38e0-421a-91b6-cf97f90eb535", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1700423127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd318d29ec50427eb997c83837120c9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57805f12-9b", "ovs_interfaceid": "57805f12-9b81-4485-8f3a-32567ed40a8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.853969] env[61629]: DEBUG nova.policy [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 
'38cc8b6343d54d30a3f6f13512d23020', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e7fced3a50d4821b42cf087d8111cb7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 923.064952] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b37d420f-bece-4ae9-a5ef-76e49a962e48 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.453s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.093015] env[61629]: DEBUG oslo_vmware.api [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Task: {'id': task-1354341, 'name': PowerOnVM_Task, 'duration_secs': 0.479154} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.093343] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 923.093719] env[61629]: INFO nova.compute.manager [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Took 7.59 seconds to spawn the instance on the hypervisor. 
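Editor's note: the records just above follow oslo.vmware's invoke-and-poll pattern (Invoking VirtualMachine.PowerOnVM_Task ... Waiting for the task ... progress is N% ... completed successfully). The sketch below only illustrates that pattern and is not Nova's code; the vCenter host, credentials, and the vm-12345 managed object reference are placeholder values.

    from oslo_vmware import api, vim_util

    # Placeholder connection details; a real session needs a reachable vCenter.
    session = api.VMwareAPISession(
        'vc.example.test',              # vCenter host (placeholder)
        'administrator@vsphere.local',  # username (placeholder)
        'secret',                       # password (placeholder)
        api_retry_count=10,
        task_poll_interval=0.5)         # poll cadence behind the "progress is N%" lines

    # Assume the VM's ManagedObjectReference was looked up earlier, e.g. via the
    # PropertyCollector.RetrievePropertiesEx calls seen throughout this log.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Invoke the asynchronous vSphere task, then block while oslo.vmware polls it.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)  # polls until success, raises on task error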
[ 923.093816] env[61629]: DEBUG nova.compute.manager [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 923.094681] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ba62dd-e6e3-4def-8fc4-8de25f081b27 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.116216] env[61629]: DEBUG nova.scheduler.client.report [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Updated inventory for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with generation 101 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 923.116496] env[61629]: DEBUG nova.compute.provider_tree [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Updating resource provider d075eff1-6f77-44a8-824e-16f3e03b4063 generation from 101 to 102 during operation: update_inventory {{(pid=61629) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 923.116779] env[61629]: DEBUG nova.compute.provider_tree [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 923.213039] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Acquiring lock "2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.213927] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Lock "2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.214503] 
env[61629]: DEBUG oslo_concurrency.lockutils [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Acquiring lock "2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.214881] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Lock "2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.215260] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Lock "2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.221113] env[61629]: INFO nova.compute.manager [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Terminating instance [ 923.225464] env[61629]: DEBUG nova.compute.manager [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 923.225855] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 923.226952] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f166553-dee2-40d1-a7a3-31b1f7757b37 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.239027] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 923.239027] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-00574405-1f72-4167-9246-efe5ba9da20d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.246144] env[61629]: DEBUG oslo_vmware.api [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Waiting for the task: (returnval){ [ 923.246144] env[61629]: value = "task-1354342" [ 923.246144] env[61629]: _type = "Task" [ 923.246144] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.259898] env[61629]: DEBUG oslo_vmware.api [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Task: {'id': task-1354342, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.292936] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5236180e-e9dd-518e-bde9-516a85441a51, 'name': SearchDatastore_Task, 'duration_secs': 0.011814} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.293800] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-843f8bfb-b98d-4d25-9662-19b4d12c239a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.300145] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 923.300145] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5256aac8-c2a0-a9c2-0309-f61693ebb060" [ 923.300145] env[61629]: _type = "Task" [ 923.300145] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.308769] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5256aac8-c2a0-a9c2-0309-f61693ebb060, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.319509] env[61629]: DEBUG oslo_concurrency.lockutils [req-5b4f5238-c223-455f-8b2f-4b6b1da0563f req-55f08230-cfaf-4fcc-97f3-e23488837c72 service nova] Releasing lock "refresh_cache-c5b6f6b8-587c-4b74-bc83-98dac319b15b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.612997] env[61629]: INFO nova.compute.manager [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Took 29.14 seconds to build instance. [ 923.623073] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.336s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.623205] env[61629]: INFO nova.compute.manager [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Migrating [ 923.623444] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 923.623596] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquired lock "compute-rpcapi-router" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.624677] env[61629]: DEBUG oslo_concurrency.lockutils [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.810s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.626370] env[61629]: INFO nova.compute.claims [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 923.630204] env[61629]: INFO nova.compute.rpcapi [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Automatically selected compute 
RPC version 6.3 from minimum service version 67 [ 923.630690] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Releasing lock "compute-rpcapi-router" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.736568] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a63fcfd4-e33d-4341-82c9-614607313ef9 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquiring lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.737097] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a63fcfd4-e33d-4341-82c9-614607313ef9 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.760775] env[61629]: DEBUG oslo_vmware.api [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Task: {'id': task-1354342, 'name': PowerOffVM_Task, 'duration_secs': 0.186557} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.761260] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 923.761494] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 923.761762] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f655de5a-c5af-4dbe-8665-e3d7206d17d9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.815284] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5256aac8-c2a0-a9c2-0309-f61693ebb060, 'name': SearchDatastore_Task, 'duration_secs': 0.010209} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.815557] env[61629]: DEBUG oslo_concurrency.lockutils [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.815824] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] c5b6f6b8-587c-4b74-bc83-98dac319b15b/c5b6f6b8-587c-4b74-bc83-98dac319b15b.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 923.816126] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a21499a4-839c-4311-a59a-5e9b1368c044 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.822125] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 923.822125] env[61629]: value = "task-1354344" [ 923.822125] env[61629]: _type = "Task" [ 923.822125] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.833506] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354344, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.116632] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9122db83-0de3-481c-bde9-383b75b0f40c tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Lock "cd165a78-21f9-4fc7-88e5-5ab35047eacc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.652s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.160022] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "refresh_cache-7cf87381-235e-449b-8269-61c2d4033028" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 924.160464] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquired lock "refresh_cache-7cf87381-235e-449b-8269-61c2d4033028" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.160534] env[61629]: DEBUG nova.network.neutron [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 924.243518] env[61629]: INFO nova.compute.manager [None req-a63fcfd4-e33d-4341-82c9-614607313ef9 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Detaching volume 5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66 [ 924.297958] env[61629]: INFO nova.virt.block_device [None req-a63fcfd4-e33d-4341-82c9-614607313ef9 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Attempting to driver detach volume 5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66 from mountpoint /dev/sdb [ 924.298585] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-a63fcfd4-e33d-4341-82c9-614607313ef9 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Volume detach. 
Driver type: vmdk {{(pid=61629) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 924.301575] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-a63fcfd4-e33d-4341-82c9-614607313ef9 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-288532', 'volume_id': '5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66', 'name': 'volume-5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2b01eeae-64be-44b3-b4cf-c2a8490043e3', 'attached_at': '', 'detached_at': '', 'volume_id': '5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66', 'serial': '5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66'} {{(pid=61629) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 924.302702] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c0e3f2a-aaab-435e-8121-40ac5b7eca77 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.337454] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682801d5-672c-4d2a-a4f1-e22ae33d637e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.346342] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354344, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.350033] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-066ef100-faf5-457e-b7b0-fc760b485522 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.374554] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4974e91-8882-4496-866a-05ca37003a2e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.391719] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-a63fcfd4-e33d-4341-82c9-614607313ef9 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] The volume has not been displaced from its original location: [datastore1] volume-5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66/volume-5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66.vmdk. No consolidation needed. 
{{(pid=61629) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 924.397805] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-a63fcfd4-e33d-4341-82c9-614607313ef9 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Reconfiguring VM instance instance-00000043 to detach disk 2001 {{(pid=61629) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 924.398295] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e7259b7-275e-4e42-af2d-c532b3ec9112 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.418793] env[61629]: DEBUG oslo_vmware.api [None req-a63fcfd4-e33d-4341-82c9-614607313ef9 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Waiting for the task: (returnval){ [ 924.418793] env[61629]: value = "task-1354345" [ 924.418793] env[61629]: _type = "Task" [ 924.418793] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.428418] env[61629]: DEBUG oslo_vmware.api [None req-a63fcfd4-e33d-4341-82c9-614607313ef9 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354345, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.514139] env[61629]: DEBUG nova.network.neutron [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Successfully updated port: d666a690-afc1-4ce0-a878-192338b5dc0d {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 924.769961] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 924.770265] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 924.770463] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Deleting the datastore file [datastore2] 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 924.770740] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-69536ab8-58e6-446b-94e0-4336ae9fbdf8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.780712] env[61629]: DEBUG oslo_vmware.api [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 
tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Waiting for the task: (returnval){ [ 924.780712] env[61629]: value = "task-1354346" [ 924.780712] env[61629]: _type = "Task" [ 924.780712] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.791893] env[61629]: DEBUG oslo_vmware.api [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Task: {'id': task-1354346, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.815325] env[61629]: DEBUG nova.compute.manager [req-f3c35be1-2a67-4a71-b190-d99770be19e0 req-73b26a54-c2f0-43e8-9235-ce63de34868f service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Received event network-vif-plugged-d666a690-afc1-4ce0-a878-192338b5dc0d {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 924.815549] env[61629]: DEBUG oslo_concurrency.lockutils [req-f3c35be1-2a67-4a71-b190-d99770be19e0 req-73b26a54-c2f0-43e8-9235-ce63de34868f service nova] Acquiring lock "09890839-b1d9-4558-992d-b1a6f4c5f750-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.817041] env[61629]: DEBUG oslo_concurrency.lockutils [req-f3c35be1-2a67-4a71-b190-d99770be19e0 req-73b26a54-c2f0-43e8-9235-ce63de34868f service nova] Lock "09890839-b1d9-4558-992d-b1a6f4c5f750-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.817041] env[61629]: DEBUG oslo_concurrency.lockutils [req-f3c35be1-2a67-4a71-b190-d99770be19e0 req-73b26a54-c2f0-43e8-9235-ce63de34868f service nova] Lock "09890839-b1d9-4558-992d-b1a6f4c5f750-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.817041] env[61629]: DEBUG nova.compute.manager [req-f3c35be1-2a67-4a71-b190-d99770be19e0 req-73b26a54-c2f0-43e8-9235-ce63de34868f service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] No waiting events found dispatching network-vif-plugged-d666a690-afc1-4ce0-a878-192338b5dc0d {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 924.817041] env[61629]: WARNING nova.compute.manager [req-f3c35be1-2a67-4a71-b190-d99770be19e0 req-73b26a54-c2f0-43e8-9235-ce63de34868f service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Received unexpected event network-vif-plugged-d666a690-afc1-4ce0-a878-192338b5dc0d for instance with vm_state active and task_state None. [ 924.847045] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354344, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.697597} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.849929] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] c5b6f6b8-587c-4b74-bc83-98dac319b15b/c5b6f6b8-587c-4b74-bc83-98dac319b15b.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 924.850230] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 924.850680] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d2cfffde-fdd7-4e5f-8d27-70c89baf40ce {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.858598] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 924.858598] env[61629]: value = "task-1354347" [ 924.858598] env[61629]: _type = "Task" [ 924.858598] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.875200] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354347, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.929788] env[61629]: DEBUG nova.network.neutron [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Updating instance_info_cache with network_info: [{"id": "e28dd480-831a-49f0-804e-ad88763d3c24", "address": "fa:16:3e:9f:0b:da", "network": {"id": "534e08bb-ebea-429f-8a3d-733c418ea99b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1143213928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6d1f876ee054beb89ca0eb0776ddcd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape28dd480-83", "ovs_interfaceid": "e28dd480-831a-49f0-804e-ad88763d3c24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.934612] env[61629]: DEBUG oslo_vmware.api [None req-a63fcfd4-e33d-4341-82c9-614607313ef9 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354345, 'name': ReconfigVM_Task, 'duration_secs': 0.474769} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.935188] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-a63fcfd4-e33d-4341-82c9-614607313ef9 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Reconfigured VM instance instance-00000043 to detach disk 2001 {{(pid=61629) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 924.943111] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1bdd997b-064c-41bc-b720-e1d73b00a90e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.969556] env[61629]: DEBUG oslo_vmware.api [None req-a63fcfd4-e33d-4341-82c9-614607313ef9 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Waiting for the task: (returnval){ [ 924.969556] env[61629]: value = "task-1354348" [ 924.969556] env[61629]: _type = "Task" [ 924.969556] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.983674] env[61629]: DEBUG oslo_vmware.api [None req-a63fcfd4-e33d-4341-82c9-614607313ef9 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354348, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.990301] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef3f365c-148b-4fe0-a454-18d6c4cbf1a3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.997720] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a7b553a-f94d-499a-ada5-18632a9a735b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.030466] env[61629]: DEBUG oslo_concurrency.lockutils [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.030659] env[61629]: DEBUG oslo_concurrency.lockutils [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquired lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.030842] env[61629]: DEBUG nova.network.neutron [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 925.033039] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ebd4a5-92df-44c7-ba0e-639b5aa1c4ad {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.041500] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64c12ca-7267-43c9-8ce1-0dbbbede7ebd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.058647] env[61629]: DEBUG nova.compute.provider_tree [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 925.291162] env[61629]: DEBUG oslo_vmware.api [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Task: {'id': task-1354346, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.250386} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.291445] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 925.291445] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 925.291628] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 925.291813] env[61629]: INFO nova.compute.manager [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Took 2.07 seconds to destroy the instance on the hypervisor. [ 925.292073] env[61629]: DEBUG oslo.service.loopingcall [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 925.292474] env[61629]: DEBUG nova.compute.manager [-] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 925.292474] env[61629]: DEBUG nova.network.neutron [-] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 925.350819] env[61629]: DEBUG oslo_concurrency.lockutils [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Acquiring lock "ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.351250] env[61629]: DEBUG oslo_concurrency.lockutils [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Lock "ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.371763] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354347, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074472} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.372106] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 925.372874] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b68f25c-14e6-4607-b342-51b81b3cc39e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.397889] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] c5b6f6b8-587c-4b74-bc83-98dac319b15b/c5b6f6b8-587c-4b74-bc83-98dac319b15b.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 925.398216] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ff57116-1e3d-424c-b4d9-adef39a88f3a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.417802] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 925.417802] env[61629]: value = "task-1354349" [ 925.417802] env[61629]: _type = "Task" [ 925.417802] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.425568] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354349, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.436265] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Releasing lock "refresh_cache-7cf87381-235e-449b-8269-61c2d4033028" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.478896] env[61629]: DEBUG oslo_vmware.api [None req-a63fcfd4-e33d-4341-82c9-614607313ef9 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354348, 'name': ReconfigVM_Task, 'duration_secs': 0.153475} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.479275] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-a63fcfd4-e33d-4341-82c9-614607313ef9 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-288532', 'volume_id': '5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66', 'name': 'volume-5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2b01eeae-64be-44b3-b4cf-c2a8490043e3', 'attached_at': '', 'detached_at': '', 'volume_id': '5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66', 'serial': '5b0f1bc3-f02a-4b74-95e2-ffb48c25ec66'} {{(pid=61629) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 925.561274] env[61629]: DEBUG nova.scheduler.client.report [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 925.581254] env[61629]: WARNING nova.network.neutron [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] 74993df5-f495-415f-bb5a-87983f0b2da1 already exists in list: networks containing: ['74993df5-f495-415f-bb5a-87983f0b2da1']. 
ignoring it [ 925.833939] env[61629]: DEBUG oslo_concurrency.lockutils [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Acquiring lock "cd165a78-21f9-4fc7-88e5-5ab35047eacc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.834263] env[61629]: DEBUG oslo_concurrency.lockutils [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Lock "cd165a78-21f9-4fc7-88e5-5ab35047eacc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.834486] env[61629]: DEBUG oslo_concurrency.lockutils [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Acquiring lock "cd165a78-21f9-4fc7-88e5-5ab35047eacc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.834681] env[61629]: DEBUG oslo_concurrency.lockutils [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Lock "cd165a78-21f9-4fc7-88e5-5ab35047eacc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.834985] env[61629]: DEBUG oslo_concurrency.lockutils [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Lock "cd165a78-21f9-4fc7-88e5-5ab35047eacc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.837567] env[61629]: INFO nova.compute.manager [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Terminating instance [ 925.840300] env[61629]: DEBUG nova.compute.manager [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 925.840653] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 925.842014] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6559e02b-b022-4ccc-a052-457e04985040 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.853236] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 925.857111] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff638dfe-6027-40ac-9942-dc501b48e4d8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.859442] env[61629]: DEBUG nova.compute.manager [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 925.869540] env[61629]: DEBUG oslo_vmware.api [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Waiting for the task: (returnval){ [ 925.869540] env[61629]: value = "task-1354350" [ 925.869540] env[61629]: _type = "Task" [ 925.869540] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.885376] env[61629]: DEBUG oslo_vmware.api [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Task: {'id': task-1354350, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.928695] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354349, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.946931] env[61629]: DEBUG nova.network.neutron [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Updating instance_info_cache with network_info: [{"id": "91aa1640-3097-4a26-9090-4081740f917d", "address": "fa:16:3e:d4:a8:15", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91aa1640-30", "ovs_interfaceid": "91aa1640-3097-4a26-9090-4081740f917d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d666a690-afc1-4ce0-a878-192338b5dc0d", "address": "fa:16:3e:8e:a4:c2", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd666a690-af", "ovs_interfaceid": "d666a690-afc1-4ce0-a878-192338b5dc0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.027604] env[61629]: DEBUG nova.objects.instance [None req-a63fcfd4-e33d-4341-82c9-614607313ef9 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lazy-loading 'flavor' on Instance uuid 2b01eeae-64be-44b3-b4cf-c2a8490043e3 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 926.066218] env[61629]: DEBUG oslo_concurrency.lockutils [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.441s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.066837] env[61629]: DEBUG nova.compute.manager [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 926.070022] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.693s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.071674] env[61629]: INFO nova.compute.claims [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 926.077420] env[61629]: DEBUG nova.network.neutron [-] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.387506] env[61629]: DEBUG oslo_vmware.api [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Task: {'id': task-1354350, 'name': PowerOffVM_Task, 'duration_secs': 0.274793} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.387860] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 926.391769] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 926.392402] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c752ca0-181a-49ef-904f-3d629a39ccf5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.403659] env[61629]: DEBUG oslo_concurrency.lockutils [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.436739] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354349, 'name': ReconfigVM_Task, 'duration_secs': 0.551752} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.437241] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Reconfigured VM instance instance-00000057 to attach disk [datastore2] c5b6f6b8-587c-4b74-bc83-98dac319b15b/c5b6f6b8-587c-4b74-bc83-98dac319b15b.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 926.438036] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-27e75431-7a29-442c-a34d-8a7e7187c5c6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.450060] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 926.450060] env[61629]: value = "task-1354352" [ 926.450060] env[61629]: _type = "Task" [ 926.450060] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.450629] env[61629]: DEBUG oslo_concurrency.lockutils [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Releasing lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.451427] env[61629]: DEBUG oslo_concurrency.lockutils [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.451641] env[61629]: DEBUG oslo_concurrency.lockutils [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquired lock "09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.459040] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5498001-7401-4a3a-9e1c-cf8f2650c7a3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.463879] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 926.464617] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 926.465174] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Deleting the datastore file [datastore2] cd165a78-21f9-4fc7-88e5-5ab35047eacc {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 926.466152] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3db42cf2-19e8-489a-bb33-cb114fa5b66a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.487314] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354352, 'name': Rename_Task} progress is 14%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.488068] env[61629]: DEBUG nova.virt.hardware [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 926.488484] env[61629]: DEBUG nova.virt.hardware [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 926.488714] env[61629]: DEBUG nova.virt.hardware [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 926.490509] env[61629]: DEBUG nova.virt.hardware [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 926.490509] env[61629]: DEBUG nova.virt.hardware [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 926.490509] env[61629]: DEBUG nova.virt.hardware [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 926.490509] env[61629]: DEBUG nova.virt.hardware [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 926.490509] env[61629]: DEBUG nova.virt.hardware [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 926.490509] env[61629]: DEBUG nova.virt.hardware [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Got 1 possible topologies {{(pid=61629) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 926.490509] env[61629]: DEBUG nova.virt.hardware [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 926.490509] env[61629]: DEBUG nova.virt.hardware [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 926.497622] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Reconfiguring VM to attach interface {{(pid=61629) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 926.499010] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96cb0ef6-d2da-4a86-a2e9-f87ddd2e13ea {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.513709] env[61629]: DEBUG oslo_vmware.api [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Waiting for the task: (returnval){ [ 926.513709] env[61629]: value = "task-1354353" [ 926.513709] env[61629]: _type = "Task" [ 926.513709] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.519258] env[61629]: DEBUG oslo_vmware.api [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 926.519258] env[61629]: value = "task-1354354" [ 926.519258] env[61629]: _type = "Task" [ 926.519258] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.526402] env[61629]: DEBUG oslo_vmware.api [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Task: {'id': task-1354353, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.529443] env[61629]: DEBUG oslo_vmware.api [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354354, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.575636] env[61629]: DEBUG nova.compute.utils [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 926.577711] env[61629]: DEBUG nova.compute.manager [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 926.577711] env[61629]: DEBUG nova.network.neutron [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 926.582352] env[61629]: INFO nova.compute.manager [-] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Took 1.29 seconds to deallocate network for instance. [ 926.623268] env[61629]: DEBUG nova.policy [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dc766b0845b443a8a92346e5d032baca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87909880104e4519b42cb204f366af3f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 926.919333] env[61629]: DEBUG nova.compute.manager [req-225b0f64-643b-4783-89e9-28619f57cc0b req-0eb6414d-9829-4c57-823e-6f4db3e9bf81 service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Received event network-changed-d666a690-afc1-4ce0-a878-192338b5dc0d {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 926.919509] env[61629]: DEBUG nova.compute.manager [req-225b0f64-643b-4783-89e9-28619f57cc0b req-0eb6414d-9829-4c57-823e-6f4db3e9bf81 service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Refreshing instance network info cache due to event network-changed-d666a690-afc1-4ce0-a878-192338b5dc0d. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 926.919765] env[61629]: DEBUG oslo_concurrency.lockutils [req-225b0f64-643b-4783-89e9-28619f57cc0b req-0eb6414d-9829-4c57-823e-6f4db3e9bf81 service nova] Acquiring lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.919925] env[61629]: DEBUG oslo_concurrency.lockutils [req-225b0f64-643b-4783-89e9-28619f57cc0b req-0eb6414d-9829-4c57-823e-6f4db3e9bf81 service nova] Acquired lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.920163] env[61629]: DEBUG nova.network.neutron [req-225b0f64-643b-4783-89e9-28619f57cc0b req-0eb6414d-9829-4c57-823e-6f4db3e9bf81 service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Refreshing network info cache for port d666a690-afc1-4ce0-a878-192338b5dc0d {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 926.947442] env[61629]: DEBUG nova.network.neutron [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Successfully created port: c827ba81-d74a-4ff3-bfc2-81b5e09c683c {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 926.961056] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-531a2e93-b627-4f2e-85ef-3cd458ad4ac5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.967089] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354352, 'name': Rename_Task, 'duration_secs': 0.175539} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.967712] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 926.967998] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3ed3e5c9-54b0-42de-bff4-044503d0dfce {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.983868] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Updating instance '7cf87381-235e-449b-8269-61c2d4033028' progress to 0 {{(pid=61629) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 926.992951] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 926.992951] env[61629]: value = "task-1354355" [ 926.992951] env[61629]: _type = "Task" [ 926.992951] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.001027] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354355, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.027281] env[61629]: DEBUG oslo_vmware.api [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Task: {'id': task-1354353, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206295} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.028173] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 927.028533] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 927.028709] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 927.028966] env[61629]: INFO nova.compute.manager [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Took 1.19 seconds to destroy the instance on the hypervisor. [ 927.029483] env[61629]: DEBUG oslo.service.loopingcall [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 927.032825] env[61629]: DEBUG nova.compute.manager [-] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 927.032937] env[61629]: DEBUG nova.network.neutron [-] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 927.034746] env[61629]: DEBUG oslo_vmware.api [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354354, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.035173] env[61629]: DEBUG oslo_concurrency.lockutils [None req-a63fcfd4-e33d-4341-82c9-614607313ef9 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.298s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.072840] env[61629]: DEBUG oslo_concurrency.lockutils [None req-49217703-e844-4ccb-9965-53d4c88ffc62 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquiring lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.073146] env[61629]: DEBUG oslo_concurrency.lockutils [None req-49217703-e844-4ccb-9965-53d4c88ffc62 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.081112] env[61629]: DEBUG nova.compute.manager [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 927.089339] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.408242] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6563f230-19eb-4a42-8f8e-04d2604c1802 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.417589] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2dba9a3-1298-4568-9689-63793c89e246 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.451859] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a42cee-4759-454a-989e-988b04ad6f95 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.461704] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da16bc31-e69d-474f-945f-e3e77fdffec9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.477569] env[61629]: DEBUG nova.compute.provider_tree [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Updating inventory in 
ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 927.489926] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 927.490449] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b0222cb6-ec68-4cb9-bbc1-4b6970b98873 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.498292] env[61629]: DEBUG oslo_vmware.api [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 927.498292] env[61629]: value = "task-1354356" [ 927.498292] env[61629]: _type = "Task" [ 927.498292] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.505143] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354355, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.512279] env[61629]: DEBUG oslo_vmware.api [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354356, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.530014] env[61629]: DEBUG oslo_vmware.api [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354354, 'name': ReconfigVM_Task, 'duration_secs': 0.582732} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.530158] env[61629]: DEBUG oslo_concurrency.lockutils [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Releasing lock "09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.530419] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Reconfigured VM to attach interface {{(pid=61629) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 927.577023] env[61629]: INFO nova.compute.manager [None req-49217703-e844-4ccb-9965-53d4c88ffc62 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Detaching volume 3fb90aea-e719-47bc-a306-d3502438a6d2 [ 927.623649] env[61629]: INFO nova.virt.block_device [None req-49217703-e844-4ccb-9965-53d4c88ffc62 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Attempting to driver detach volume 3fb90aea-e719-47bc-a306-d3502438a6d2 from mountpoint /dev/sdc [ 927.623649] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-49217703-e844-4ccb-9965-53d4c88ffc62 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Volume detach. Driver type: vmdk {{(pid=61629) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 927.623649] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-49217703-e844-4ccb-9965-53d4c88ffc62 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-288539', 'volume_id': '3fb90aea-e719-47bc-a306-d3502438a6d2', 'name': 'volume-3fb90aea-e719-47bc-a306-d3502438a6d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2b01eeae-64be-44b3-b4cf-c2a8490043e3', 'attached_at': '', 'detached_at': '', 'volume_id': '3fb90aea-e719-47bc-a306-d3502438a6d2', 'serial': '3fb90aea-e719-47bc-a306-d3502438a6d2'} {{(pid=61629) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 927.624541] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac233458-a559-4266-8479-7a31ddcc22fa {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.652640] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb3a149-67c8-4492-ad10-de31ae049abf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.660476] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ebceec5-e2fd-4b39-8831-88aa7f03c436 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.681730] env[61629]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ce39c30-755d-40c8-99a9-7949fabd97d9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.697048] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-49217703-e844-4ccb-9965-53d4c88ffc62 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] The volume has not been displaced from its original location: [datastore2] volume-3fb90aea-e719-47bc-a306-d3502438a6d2/volume-3fb90aea-e719-47bc-a306-d3502438a6d2.vmdk. No consolidation needed. {{(pid=61629) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 927.702684] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-49217703-e844-4ccb-9965-53d4c88ffc62 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Reconfiguring VM instance instance-00000043 to detach disk 2002 {{(pid=61629) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 927.705209] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0f6acde-3d63-444c-92c5-bcadc53adaf0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.726018] env[61629]: DEBUG oslo_vmware.api [None req-49217703-e844-4ccb-9965-53d4c88ffc62 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Waiting for the task: (returnval){ [ 927.726018] env[61629]: value = "task-1354357" [ 927.726018] env[61629]: _type = "Task" [ 927.726018] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.735367] env[61629]: DEBUG oslo_vmware.api [None req-49217703-e844-4ccb-9965-53d4c88ffc62 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354357, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.754269] env[61629]: DEBUG nova.network.neutron [req-225b0f64-643b-4783-89e9-28619f57cc0b req-0eb6414d-9829-4c57-823e-6f4db3e9bf81 service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Updated VIF entry in instance network info cache for port d666a690-afc1-4ce0-a878-192338b5dc0d. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 927.754769] env[61629]: DEBUG nova.network.neutron [req-225b0f64-643b-4783-89e9-28619f57cc0b req-0eb6414d-9829-4c57-823e-6f4db3e9bf81 service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Updating instance_info_cache with network_info: [{"id": "91aa1640-3097-4a26-9090-4081740f917d", "address": "fa:16:3e:d4:a8:15", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91aa1640-30", "ovs_interfaceid": "91aa1640-3097-4a26-9090-4081740f917d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d666a690-afc1-4ce0-a878-192338b5dc0d", "address": "fa:16:3e:8e:a4:c2", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd666a690-af", "ovs_interfaceid": "d666a690-afc1-4ce0-a878-192338b5dc0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.957418] env[61629]: DEBUG nova.network.neutron [-] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.998271] env[61629]: ERROR nova.scheduler.client.report [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [req-37484787-dddf-483c-8f1f-35b8875d176c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID d075eff1-6f77-44a8-824e-16f3e03b4063. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-37484787-dddf-483c-8f1f-35b8875d176c"}]} [ 928.006580] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354355, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.011242] env[61629]: DEBUG oslo_vmware.api [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354356, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.014864] env[61629]: DEBUG nova.scheduler.client.report [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Refreshing inventories for resource provider d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 928.028317] env[61629]: DEBUG nova.scheduler.client.report [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Updating ProviderTree inventory for provider d075eff1-6f77-44a8-824e-16f3e03b4063 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 928.028612] env[61629]: DEBUG nova.compute.provider_tree [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 928.035673] env[61629]: DEBUG oslo_concurrency.lockutils [None req-32b22676-9742-4636-9409-98c3ba5a0d45 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "interface-09890839-b1d9-4558-992d-b1a6f4c5f750-d666a690-afc1-4ce0-a878-192338b5dc0d" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.627s {{(pid=61629) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.041741] env[61629]: DEBUG nova.scheduler.client.report [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Refreshing aggregate associations for resource provider d075eff1-6f77-44a8-824e-16f3e03b4063, aggregates: None {{(pid=61629) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 928.060314] env[61629]: DEBUG nova.scheduler.client.report [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Refreshing trait associations for resource provider d075eff1-6f77-44a8-824e-16f3e03b4063, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61629) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 928.093242] env[61629]: DEBUG nova.compute.manager [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 928.117723] env[61629]: DEBUG nova.virt.hardware [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 928.117984] env[61629]: DEBUG nova.virt.hardware [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 928.118184] env[61629]: DEBUG nova.virt.hardware [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 928.118381] env[61629]: DEBUG nova.virt.hardware [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 928.118536] env[61629]: DEBUG nova.virt.hardware [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff 
tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 928.118688] env[61629]: DEBUG nova.virt.hardware [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 928.118899] env[61629]: DEBUG nova.virt.hardware [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 928.119077] env[61629]: DEBUG nova.virt.hardware [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 928.119255] env[61629]: DEBUG nova.virt.hardware [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 928.119422] env[61629]: DEBUG nova.virt.hardware [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 928.119596] env[61629]: DEBUG nova.virt.hardware [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 928.120753] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc14444-b4d8-4d0a-a345-b345ebbcfb38 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.130919] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ab01e32-735b-48d2-81b2-63eddae68cb0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.236332] env[61629]: DEBUG oslo_vmware.api [None req-49217703-e844-4ccb-9965-53d4c88ffc62 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354357, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.257681] env[61629]: DEBUG oslo_concurrency.lockutils [req-225b0f64-643b-4783-89e9-28619f57cc0b req-0eb6414d-9829-4c57-823e-6f4db3e9bf81 service nova] Releasing lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.257951] env[61629]: DEBUG nova.compute.manager [req-225b0f64-643b-4783-89e9-28619f57cc0b req-0eb6414d-9829-4c57-823e-6f4db3e9bf81 service nova] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Received event network-vif-deleted-787f3ff1-d4f3-429f-8ee5-a5785d993cfc {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 928.285524] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf078cc-c26b-4633-9530-c20cfc597c65 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.292996] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf8ee68-23d0-4000-a877-bb9750183969 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.323877] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e37ae704-4cb5-4f8d-89a8-8afd04c84f82 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.331195] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7765c7a-10b7-46df-9703-f140b919d7e6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.345953] env[61629]: DEBUG nova.compute.provider_tree [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 928.460525] env[61629]: INFO nova.compute.manager [-] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Took 1.43 seconds to deallocate network for instance. [ 928.508153] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354355, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.513387] env[61629]: DEBUG oslo_vmware.api [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354356, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.666586] env[61629]: DEBUG nova.network.neutron [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Successfully updated port: c827ba81-d74a-4ff3-bfc2-81b5e09c683c {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 928.737128] env[61629]: DEBUG oslo_vmware.api [None req-49217703-e844-4ccb-9965-53d4c88ffc62 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354357, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.967958] env[61629]: DEBUG oslo_concurrency.lockutils [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.007864] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354355, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.012475] env[61629]: DEBUG oslo_vmware.api [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354356, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.238531] env[61629]: DEBUG oslo_vmware.api [None req-49217703-e844-4ccb-9965-53d4c88ffc62 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354357, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.504982] env[61629]: DEBUG oslo_vmware.api [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354355, 'name': PowerOnVM_Task, 'duration_secs': 2.365115} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.508111] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 929.508372] env[61629]: INFO nova.compute.manager [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Took 11.56 seconds to spawn the instance on the hypervisor. 
[ 929.508664] env[61629]: DEBUG nova.compute.manager [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 929.509429] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad9cf10-b767-49b4-b340-f77424152bf6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.515994] env[61629]: DEBUG oslo_vmware.api [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354356, 'name': PowerOffVM_Task, 'duration_secs': 1.988966} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.517421] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 929.517619] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Updating instance '7cf87381-235e-449b-8269-61c2d4033028' progress to 17 {{(pid=61629) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 929.688307] env[61629]: DEBUG nova.scheduler.client.report [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Updated inventory for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with generation 103 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 929.688541] env[61629]: DEBUG nova.compute.provider_tree [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Updating resource provider d075eff1-6f77-44a8-824e-16f3e03b4063 generation from 103 to 104 during operation: update_inventory {{(pid=61629) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 929.688843] env[61629]: DEBUG nova.compute.provider_tree [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 
0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 929.712492] env[61629]: DEBUG nova.compute.manager [req-0438b64c-2b65-44dc-89ef-22ee1d6aca1c req-8361961a-87b3-4935-877b-62fd7e99b827 service nova] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Received event network-vif-deleted-dd6bf404-0e6a-4868-a42a-abc64b875fa8 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 929.713858] env[61629]: DEBUG nova.compute.manager [req-0438b64c-2b65-44dc-89ef-22ee1d6aca1c req-8361961a-87b3-4935-877b-62fd7e99b827 service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Received event network-vif-plugged-c827ba81-d74a-4ff3-bfc2-81b5e09c683c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 929.713858] env[61629]: DEBUG oslo_concurrency.lockutils [req-0438b64c-2b65-44dc-89ef-22ee1d6aca1c req-8361961a-87b3-4935-877b-62fd7e99b827 service nova] Acquiring lock "459c5f25-8fb1-4e43-8f7f-359a7ff697f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.713858] env[61629]: DEBUG oslo_concurrency.lockutils [req-0438b64c-2b65-44dc-89ef-22ee1d6aca1c req-8361961a-87b3-4935-877b-62fd7e99b827 service nova] Lock "459c5f25-8fb1-4e43-8f7f-359a7ff697f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.713858] env[61629]: DEBUG oslo_concurrency.lockutils [req-0438b64c-2b65-44dc-89ef-22ee1d6aca1c req-8361961a-87b3-4935-877b-62fd7e99b827 service nova] Lock "459c5f25-8fb1-4e43-8f7f-359a7ff697f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.713858] env[61629]: DEBUG nova.compute.manager [req-0438b64c-2b65-44dc-89ef-22ee1d6aca1c req-8361961a-87b3-4935-877b-62fd7e99b827 service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] No waiting events found dispatching network-vif-plugged-c827ba81-d74a-4ff3-bfc2-81b5e09c683c {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 929.713858] env[61629]: WARNING nova.compute.manager [req-0438b64c-2b65-44dc-89ef-22ee1d6aca1c req-8361961a-87b3-4935-877b-62fd7e99b827 service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Received unexpected event network-vif-plugged-c827ba81-d74a-4ff3-bfc2-81b5e09c683c for instance with vm_state building and task_state spawning. [ 929.713858] env[61629]: DEBUG nova.compute.manager [req-0438b64c-2b65-44dc-89ef-22ee1d6aca1c req-8361961a-87b3-4935-877b-62fd7e99b827 service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Received event network-changed-c827ba81-d74a-4ff3-bfc2-81b5e09c683c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 929.714700] env[61629]: DEBUG nova.compute.manager [req-0438b64c-2b65-44dc-89ef-22ee1d6aca1c req-8361961a-87b3-4935-877b-62fd7e99b827 service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Refreshing instance network info cache due to event network-changed-c827ba81-d74a-4ff3-bfc2-81b5e09c683c. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 929.714700] env[61629]: DEBUG oslo_concurrency.lockutils [req-0438b64c-2b65-44dc-89ef-22ee1d6aca1c req-8361961a-87b3-4935-877b-62fd7e99b827 service nova] Acquiring lock "refresh_cache-459c5f25-8fb1-4e43-8f7f-359a7ff697f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.714700] env[61629]: DEBUG oslo_concurrency.lockutils [req-0438b64c-2b65-44dc-89ef-22ee1d6aca1c req-8361961a-87b3-4935-877b-62fd7e99b827 service nova] Acquired lock "refresh_cache-459c5f25-8fb1-4e43-8f7f-359a7ff697f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.714700] env[61629]: DEBUG nova.network.neutron [req-0438b64c-2b65-44dc-89ef-22ee1d6aca1c req-8361961a-87b3-4935-877b-62fd7e99b827 service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Refreshing network info cache for port c827ba81-d74a-4ff3-bfc2-81b5e09c683c {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 929.737457] env[61629]: DEBUG oslo_vmware.api [None req-49217703-e844-4ccb-9965-53d4c88ffc62 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354357, 'name': ReconfigVM_Task, 'duration_secs': 1.688366} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.737909] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-49217703-e844-4ccb-9965-53d4c88ffc62 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Reconfigured VM instance instance-00000043 to detach disk 2002 {{(pid=61629) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 929.742888] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8caf0e3b-494e-4f83-8a57-db47069b2b9c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.759336] env[61629]: DEBUG oslo_vmware.api [None req-49217703-e844-4ccb-9965-53d4c88ffc62 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Waiting for the task: (returnval){ [ 929.759336] env[61629]: value = "task-1354358" [ 929.759336] env[61629]: _type = "Task" [ 929.759336] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.769195] env[61629]: DEBUG oslo_vmware.api [None req-49217703-e844-4ccb-9965-53d4c88ffc62 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354358, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.026059] env[61629]: DEBUG nova.virt.hardware [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:56Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 930.026330] env[61629]: DEBUG nova.virt.hardware [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 930.026492] env[61629]: DEBUG nova.virt.hardware [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 930.026675] env[61629]: DEBUG nova.virt.hardware [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 930.026825] env[61629]: DEBUG nova.virt.hardware [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 930.026982] env[61629]: DEBUG nova.virt.hardware [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 930.027232] env[61629]: DEBUG nova.virt.hardware [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 930.027433] env[61629]: DEBUG nova.virt.hardware [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 930.027560] env[61629]: DEBUG nova.virt.hardware [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 
tempest-ServerDiskConfigTestJSON-1122083488-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 930.027724] env[61629]: DEBUG nova.virt.hardware [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 930.027898] env[61629]: DEBUG nova.virt.hardware [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 930.036638] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9872232d-b7b1-409f-a544-f3898c1af44b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.050080] env[61629]: INFO nova.compute.manager [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Took 34.01 seconds to build instance. [ 930.055592] env[61629]: DEBUG oslo_vmware.api [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 930.055592] env[61629]: value = "task-1354359" [ 930.055592] env[61629]: _type = "Task" [ 930.055592] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.065181] env[61629]: DEBUG oslo_vmware.api [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354359, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.171366] env[61629]: DEBUG oslo_concurrency.lockutils [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "refresh_cache-459c5f25-8fb1-4e43-8f7f-359a7ff697f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.193863] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.124s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.194435] env[61629]: DEBUG nova.compute.manager [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 930.197869] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.824s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.197869] env[61629]: DEBUG nova.objects.instance [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lazy-loading 'resources' on Instance uuid d37958f8-7607-418b-9cfd-c3a5df721e94 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 930.264324] env[61629]: DEBUG nova.network.neutron [req-0438b64c-2b65-44dc-89ef-22ee1d6aca1c req-8361961a-87b3-4935-877b-62fd7e99b827 service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 930.272145] env[61629]: DEBUG oslo_vmware.api [None req-49217703-e844-4ccb-9965-53d4c88ffc62 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354358, 'name': ReconfigVM_Task, 'duration_secs': 0.179114} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.272411] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-49217703-e844-4ccb-9965-53d4c88ffc62 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-288539', 'volume_id': '3fb90aea-e719-47bc-a306-d3502438a6d2', 'name': 'volume-3fb90aea-e719-47bc-a306-d3502438a6d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2b01eeae-64be-44b3-b4cf-c2a8490043e3', 'attached_at': '', 'detached_at': '', 'volume_id': '3fb90aea-e719-47bc-a306-d3502438a6d2', 'serial': '3fb90aea-e719-47bc-a306-d3502438a6d2'} {{(pid=61629) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 930.345276] env[61629]: DEBUG nova.network.neutron [req-0438b64c-2b65-44dc-89ef-22ee1d6aca1c req-8361961a-87b3-4935-877b-62fd7e99b827 service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.554878] env[61629]: DEBUG oslo_concurrency.lockutils [None req-81c16bd0-56f8-46f5-abb1-8883ee366b41 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.521s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.556435] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock 
"interface-09890839-b1d9-4558-992d-b1a6f4c5f750-d666a690-afc1-4ce0-a878-192338b5dc0d" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.556745] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "interface-09890839-b1d9-4558-992d-b1a6f4c5f750-d666a690-afc1-4ce0-a878-192338b5dc0d" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.567142] env[61629]: DEBUG oslo_vmware.api [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354359, 'name': ReconfigVM_Task, 'duration_secs': 0.133668} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.568061] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Updating instance '7cf87381-235e-449b-8269-61c2d4033028' progress to 33 {{(pid=61629) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 930.700686] env[61629]: DEBUG nova.compute.utils [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 930.702248] env[61629]: DEBUG nova.compute.manager [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 930.702468] env[61629]: DEBUG nova.network.neutron [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 930.784058] env[61629]: DEBUG nova.policy [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c62f9a7c8b5f4ef985880339407b46a1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0578ce75c37942d4ba6c8b862ceb7d92', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 930.831566] env[61629]: DEBUG nova.objects.instance [None req-49217703-e844-4ccb-9965-53d4c88ffc62 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lazy-loading 'flavor' on Instance uuid 2b01eeae-64be-44b3-b4cf-c2a8490043e3 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 930.847748] env[61629]: DEBUG oslo_concurrency.lockutils [req-0438b64c-2b65-44dc-89ef-22ee1d6aca1c req-8361961a-87b3-4935-877b-62fd7e99b827 service nova] Releasing lock "refresh_cache-459c5f25-8fb1-4e43-8f7f-359a7ff697f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.848368] env[61629]: DEBUG oslo_concurrency.lockutils [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquired lock "refresh_cache-459c5f25-8fb1-4e43-8f7f-359a7ff697f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.848523] env[61629]: DEBUG nova.network.neutron [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 930.998138] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a982db57-9502-4b2a-9bf4-85cd7e36cab1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.004827] env[61629]: DEBUG oslo_concurrency.lockutils [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.005337] env[61629]: DEBUG oslo_concurrency.lockutils [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock 
"3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.009808] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c07af9b2-4097-48fe-b673-d98d3a99edba {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.049305] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f8b9da3-e482-4ffe-874d-9e9335d5ae9e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.057539] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3df3d04-3cf3-4441-9512-4db7532dd648 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.062060] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.062368] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquired lock "09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.066057] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3247c848-4713-450d-ab9c-6064f68a6054 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.077080] env[61629]: DEBUG nova.virt.hardware [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 931.077451] env[61629]: DEBUG nova.virt.hardware [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 931.077728] env[61629]: DEBUG nova.virt.hardware [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Image limits 0:0:0 
{{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 931.078111] env[61629]: DEBUG nova.virt.hardware [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 931.078447] env[61629]: DEBUG nova.virt.hardware [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 931.078800] env[61629]: DEBUG nova.virt.hardware [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 931.079144] env[61629]: DEBUG nova.virt.hardware [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 931.079436] env[61629]: DEBUG nova.virt.hardware [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 931.079741] env[61629]: DEBUG nova.virt.hardware [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 931.080043] env[61629]: DEBUG nova.virt.hardware [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 931.080346] env[61629]: DEBUG nova.virt.hardware [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 931.085586] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Reconfiguring VM instance instance-00000052 to detach disk 2000 {{(pid=61629) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 931.086183] env[61629]: DEBUG nova.compute.provider_tree [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Inventory has not changed in ProviderTree for provider: 
d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 931.088034] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b754c4fc-bda2-41ff-995b-34711b49cab1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.102659] env[61629]: DEBUG nova.scheduler.client.report [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 931.119258] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.922s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.122398] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877d015e-7b73-45b1-a048-f6f9b99d2d79 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.125729] env[61629]: DEBUG oslo_concurrency.lockutils [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.218s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.125729] env[61629]: DEBUG nova.objects.instance [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lazy-loading 'resources' on Instance uuid a42d5132-22e5-4551-80d2-fb7a55a7fa9e {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 931.129042] env[61629]: DEBUG oslo_vmware.api [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 931.129042] env[61629]: value = "task-1354360" [ 931.129042] env[61629]: _type = "Task" [ 931.129042] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.155139] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Reconfiguring VM to detach interface {{(pid=61629) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 931.156326] env[61629]: INFO nova.scheduler.client.report [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Deleted allocations for instance d37958f8-7607-418b-9cfd-c3a5df721e94 [ 931.160788] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e10cb997-059e-4a4a-9323-e0d74446a989 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.181546] env[61629]: DEBUG oslo_vmware.api [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354360, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.187250] env[61629]: DEBUG oslo_vmware.api [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 931.187250] env[61629]: value = "task-1354361" [ 931.187250] env[61629]: _type = "Task" [ 931.187250] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.191560] env[61629]: DEBUG oslo_vmware.api [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354361, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.210020] env[61629]: DEBUG nova.compute.manager [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 931.223836] env[61629]: DEBUG nova.network.neutron [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Successfully created port: 8a6b1581-a94f-479e-b2c5-2ee0d1cabe06 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 931.381034] env[61629]: DEBUG nova.network.neutron [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 931.510396] env[61629]: DEBUG nova.compute.manager [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 931.520721] env[61629]: DEBUG nova.network.neutron [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Updating instance_info_cache with network_info: [{"id": "c827ba81-d74a-4ff3-bfc2-81b5e09c683c", "address": "fa:16:3e:6d:96:2d", "network": {"id": "a1fb78c4-7c5c-4692-86e0-3111b87b44c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1355821875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87909880104e4519b42cb204f366af3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc827ba81-d7", "ovs_interfaceid": "c827ba81-d74a-4ff3-bfc2-81b5e09c683c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.656517] env[61629]: DEBUG oslo_vmware.api [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354360, 'name': ReconfigVM_Task, 'duration_secs': 0.182626} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.658863] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Reconfigured VM instance instance-00000052 to detach disk 2000 {{(pid=61629) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 931.659806] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140929ca-969c-41e9-a464-82b8f6eec73b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.681677] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 7cf87381-235e-449b-8269-61c2d4033028/7cf87381-235e-449b-8269-61c2d4033028.vmdk or device None with type thin {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 931.686186] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b163cb36-4e4d-43ff-ad82-f74e7e2c9901 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.699245] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7e2e62ed-cef7-4b2b-ae3c-5d86ff94096f tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "d37958f8-7607-418b-9cfd-c3a5df721e94" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.821s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.710661] env[61629]: DEBUG oslo_vmware.api [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354361, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.718467] env[61629]: DEBUG oslo_vmware.api [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 931.718467] env[61629]: value = "task-1354362" [ 931.718467] env[61629]: _type = "Task" [ 931.718467] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.729159] env[61629]: DEBUG oslo_vmware.api [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354362, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.838929] env[61629]: DEBUG oslo_concurrency.lockutils [None req-49217703-e844-4ccb-9965-53d4c88ffc62 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.765s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.928593] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024436a1-137d-4e79-90e1-028727b34b53 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.939196] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717854a3-6717-4b44-b9aa-64318867bafb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.970033] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8568a87c-c21a-4786-89c5-9c9e034eb6d4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.977322] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fcccebf-c788-4815-acc4-6a642bd8f191 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.991015] env[61629]: DEBUG nova.compute.provider_tree [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 932.026476] env[61629]: DEBUG oslo_concurrency.lockutils [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Releasing lock "refresh_cache-459c5f25-8fb1-4e43-8f7f-359a7ff697f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.026476] env[61629]: DEBUG nova.compute.manager [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Instance network_info: |[{"id": "c827ba81-d74a-4ff3-bfc2-81b5e09c683c", "address": "fa:16:3e:6d:96:2d", "network": {"id": "a1fb78c4-7c5c-4692-86e0-3111b87b44c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1355821875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87909880104e4519b42cb204f366af3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": 
"nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc827ba81-d7", "ovs_interfaceid": "c827ba81-d74a-4ff3-bfc2-81b5e09c683c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 932.026476] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:96:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c492f5cc-7ae0-4cab-823c-0d5dd8c60b26', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c827ba81-d74a-4ff3-bfc2-81b5e09c683c', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 932.033767] env[61629]: DEBUG oslo.service.loopingcall [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 932.034842] env[61629]: DEBUG oslo_concurrency.lockutils [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.035392] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 932.037039] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be79d2e7-d69f-4a47-b60e-294dfb8dca3c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.054787] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 932.054787] env[61629]: value = "task-1354363" [ 932.054787] env[61629]: _type = "Task" [ 932.054787] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.062518] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354363, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.212319] env[61629]: DEBUG oslo_vmware.api [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354361, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.220766] env[61629]: DEBUG nova.compute.manager [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 932.233593] env[61629]: DEBUG oslo_vmware.api [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354362, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.248717] env[61629]: DEBUG nova.virt.hardware [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 932.249016] env[61629]: DEBUG nova.virt.hardware [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 932.249206] env[61629]: DEBUG nova.virt.hardware [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 932.249415] env[61629]: DEBUG nova.virt.hardware [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 932.249583] env[61629]: DEBUG nova.virt.hardware [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 932.249749] env[61629]: DEBUG nova.virt.hardware [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 932.249976] env[61629]: DEBUG nova.virt.hardware [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 932.250165] env[61629]: DEBUG nova.virt.hardware [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 
tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 932.250401] env[61629]: DEBUG nova.virt.hardware [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 932.250735] env[61629]: DEBUG nova.virt.hardware [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 932.250814] env[61629]: DEBUG nova.virt.hardware [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 932.251684] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3473aed8-3e98-4a4f-b802-254c4c22ba68 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.260623] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b34fd7-d323-4b72-9600-9a4c2c4a9443 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.494204] env[61629]: DEBUG nova.scheduler.client.report [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 932.565295] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354363, 'name': CreateVM_Task, 'duration_secs': 0.333758} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.565487] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 932.566196] env[61629]: DEBUG oslo_concurrency.lockutils [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.566370] env[61629]: DEBUG oslo_concurrency.lockutils [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.566935] env[61629]: DEBUG oslo_concurrency.lockutils [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 932.567226] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12e3325e-8aa5-4db1-8598-5a90d5510f5f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.572132] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 932.572132] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52d548a2-79ef-8e92-2fc4-45768d004154" [ 932.572132] env[61629]: _type = "Task" [ 932.572132] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.579787] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52d548a2-79ef-8e92-2fc4-45768d004154, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.585238] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "9c340ca1-75e0-4d65-8aae-0d5e11ff3e66" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.585456] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "9c340ca1-75e0-4d65-8aae-0d5e11ff3e66" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.585657] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "9c340ca1-75e0-4d65-8aae-0d5e11ff3e66-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.585843] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "9c340ca1-75e0-4d65-8aae-0d5e11ff3e66-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.586032] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "9c340ca1-75e0-4d65-8aae-0d5e11ff3e66-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.587945] env[61629]: INFO nova.compute.manager [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Terminating instance [ 932.589663] env[61629]: DEBUG nova.compute.manager [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 932.589865] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 932.590602] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-267da922-e01f-44a7-9452-03fb58c64c3f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.596808] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 932.597041] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef770707-0839-4fd6-9663-8d879282cf87 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.602783] env[61629]: DEBUG oslo_vmware.api [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 932.602783] env[61629]: value = "task-1354364" [ 932.602783] env[61629]: _type = "Task" [ 932.602783] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.609751] env[61629]: DEBUG oslo_vmware.api [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354364, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.623430] env[61629]: DEBUG oslo_concurrency.lockutils [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquiring lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.623688] env[61629]: DEBUG oslo_concurrency.lockutils [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.623903] env[61629]: DEBUG oslo_concurrency.lockutils [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquiring lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.624088] env[61629]: DEBUG oslo_concurrency.lockutils [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.624265] env[61629]: DEBUG oslo_concurrency.lockutils [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.626542] env[61629]: INFO nova.compute.manager [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Terminating instance [ 932.628479] env[61629]: DEBUG nova.compute.manager [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 932.628827] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 932.629502] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd2c8c7-a1a2-40d4-8c3c-73c21e1527fc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.636613] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 932.636848] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-44f93c24-0075-4d7c-a70e-465cc608af5d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.640036] env[61629]: DEBUG nova.compute.manager [req-9d17379c-c6a8-4213-992f-6929ca57d792 req-d90e87c2-2128-4683-be5c-dda0371ceda9 service nova] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Received event network-vif-plugged-8a6b1581-a94f-479e-b2c5-2ee0d1cabe06 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 932.640210] env[61629]: DEBUG oslo_concurrency.lockutils [req-9d17379c-c6a8-4213-992f-6929ca57d792 req-d90e87c2-2128-4683-be5c-dda0371ceda9 service nova] Acquiring lock "2ce60374-7baf-4d27-afbd-dcfaf6600a78-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.640403] env[61629]: DEBUG oslo_concurrency.lockutils [req-9d17379c-c6a8-4213-992f-6929ca57d792 req-d90e87c2-2128-4683-be5c-dda0371ceda9 service nova] Lock "2ce60374-7baf-4d27-afbd-dcfaf6600a78-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.640553] env[61629]: DEBUG oslo_concurrency.lockutils [req-9d17379c-c6a8-4213-992f-6929ca57d792 req-d90e87c2-2128-4683-be5c-dda0371ceda9 service nova] Lock "2ce60374-7baf-4d27-afbd-dcfaf6600a78-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.640723] env[61629]: DEBUG nova.compute.manager [req-9d17379c-c6a8-4213-992f-6929ca57d792 req-d90e87c2-2128-4683-be5c-dda0371ceda9 service nova] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] No waiting events found dispatching network-vif-plugged-8a6b1581-a94f-479e-b2c5-2ee0d1cabe06 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 932.640891] env[61629]: WARNING nova.compute.manager [req-9d17379c-c6a8-4213-992f-6929ca57d792 req-d90e87c2-2128-4683-be5c-dda0371ceda9 service nova] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Received unexpected event network-vif-plugged-8a6b1581-a94f-479e-b2c5-2ee0d1cabe06 for instance 
with vm_state building and task_state spawning. [ 932.645954] env[61629]: DEBUG oslo_vmware.api [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Waiting for the task: (returnval){ [ 932.645954] env[61629]: value = "task-1354365" [ 932.645954] env[61629]: _type = "Task" [ 932.645954] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.653986] env[61629]: DEBUG oslo_vmware.api [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354365, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.710384] env[61629]: DEBUG oslo_vmware.api [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354361, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.732632] env[61629]: DEBUG oslo_vmware.api [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354362, 'name': ReconfigVM_Task, 'duration_secs': 0.582819} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.733554] env[61629]: DEBUG nova.network.neutron [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Successfully updated port: 8a6b1581-a94f-479e-b2c5-2ee0d1cabe06 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 932.734859] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 7cf87381-235e-449b-8269-61c2d4033028/7cf87381-235e-449b-8269-61c2d4033028.vmdk or device None with type thin {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 932.735190] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Updating instance '7cf87381-235e-449b-8269-61c2d4033028' progress to 50 {{(pid=61629) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 932.999566] env[61629]: DEBUG oslo_concurrency.lockutils [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.875s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.002488] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 
tempest-ServersTestMultiNic-211152432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.896s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.002731] env[61629]: DEBUG nova.objects.instance [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Lazy-loading 'resources' on Instance uuid 3085a70f-360c-43a3-80d7-e7b87fb3e146 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 933.023973] env[61629]: INFO nova.scheduler.client.report [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Deleted allocations for instance a42d5132-22e5-4551-80d2-fb7a55a7fa9e [ 933.084615] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52d548a2-79ef-8e92-2fc4-45768d004154, 'name': SearchDatastore_Task, 'duration_secs': 0.02958} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.085035] env[61629]: DEBUG oslo_concurrency.lockutils [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.085252] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 933.085504] env[61629]: DEBUG oslo_concurrency.lockutils [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.085638] env[61629]: DEBUG oslo_concurrency.lockutils [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.085944] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 933.086098] env[61629]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-943baf62-421e-493f-a391-8c9bcb9c127a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.099519] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 933.099728] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 933.100487] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-719bc673-f296-44a8-9ae0-2b88bc1cbc4d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.107708] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 933.107708] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52c0d78b-d479-fb09-0142-deb07cdb02fd" [ 933.107708] env[61629]: _type = "Task" [ 933.107708] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.113569] env[61629]: DEBUG oslo_vmware.api [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354364, 'name': PowerOffVM_Task, 'duration_secs': 0.220407} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.114278] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 933.114383] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 933.114550] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b8ef536-7e7e-47ea-907b-f962a78b45d4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.118440] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52c0d78b-d479-fb09-0142-deb07cdb02fd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.154282] env[61629]: DEBUG oslo_vmware.api [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354365, 'name': PowerOffVM_Task, 'duration_secs': 0.177798} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.154518] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 933.154693] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 933.154939] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-61a38552-b74f-41b2-bfab-a8e19265df88 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.194590] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 933.194824] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 933.195042] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Deleting the datastore file [datastore1] 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 933.195299] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a152e6be-ff9f-4866-9ac8-e0e735bd2458 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.201154] env[61629]: DEBUG oslo_vmware.api [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for the task: (returnval){ [ 933.201154] env[61629]: value = "task-1354368" [ 933.201154] env[61629]: _type = "Task" [ 933.201154] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.211967] env[61629]: DEBUG oslo_vmware.api [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354361, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.214898] env[61629]: DEBUG oslo_vmware.api [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354368, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.217561] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 933.217830] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 933.218028] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Deleting the datastore file [datastore2] 2b01eeae-64be-44b3-b4cf-c2a8490043e3 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 933.218319] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a9299607-6c20-4a25-b7e8-7cf8804cd952 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.223926] env[61629]: DEBUG oslo_vmware.api [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Waiting for the task: (returnval){ [ 933.223926] env[61629]: value = "task-1354369" [ 933.223926] env[61629]: _type = "Task" [ 933.223926] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.231149] env[61629]: DEBUG oslo_vmware.api [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354369, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.240225] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "refresh_cache-2ce60374-7baf-4d27-afbd-dcfaf6600a78" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.240225] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired lock "refresh_cache-2ce60374-7baf-4d27-afbd-dcfaf6600a78" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.240225] env[61629]: DEBUG nova.network.neutron [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 933.243542] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940c16b3-9818-4b24-bd1a-c972b742e10e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.265068] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6df526-543b-4d83-9bbd-2776751839b6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.285433] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Updating instance '7cf87381-235e-449b-8269-61c2d4033028' progress to 67 {{(pid=61629) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 933.530907] env[61629]: DEBUG oslo_concurrency.lockutils [None req-443eba36-c27a-4eda-a78a-2483d999947d tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "a42d5132-22e5-4551-80d2-fb7a55a7fa9e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.293s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.617479] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52c0d78b-d479-fb09-0142-deb07cdb02fd, 'name': SearchDatastore_Task, 'duration_secs': 0.014253} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.620458] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de6798b3-c9af-4a22-a777-4387510bd2ac {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.627668] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 933.627668] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]521d90e5-66cb-4c84-f133-e7a14417eb8b" [ 933.627668] env[61629]: _type = "Task" [ 933.627668] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.637046] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]521d90e5-66cb-4c84-f133-e7a14417eb8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.714253] env[61629]: DEBUG oslo_vmware.api [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354361, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.718610] env[61629]: DEBUG oslo_vmware.api [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Task: {'id': task-1354368, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17486} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.718708] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 933.718884] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 933.719084] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 933.719268] env[61629]: INFO nova.compute.manager [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 933.719534] env[61629]: DEBUG oslo.service.loopingcall [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 933.719963] env[61629]: DEBUG nova.compute.manager [-] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 933.719963] env[61629]: DEBUG nova.network.neutron [-] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 933.734200] env[61629]: DEBUG oslo_vmware.api [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Task: {'id': task-1354369, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149558} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.734512] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 933.734720] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 933.734933] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 933.735130] env[61629]: INFO nova.compute.manager [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Took 1.11 seconds to destroy the instance on the hypervisor. [ 933.735579] env[61629]: DEBUG oslo.service.loopingcall [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 933.736730] env[61629]: DEBUG nova.compute.manager [-] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 933.736965] env[61629]: DEBUG nova.network.neutron [-] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 933.739120] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de83ebf0-ae16-411c-96c4-48236cc269f7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.746113] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7a3939-32b6-48e2-a577-c8b09523f51f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.782831] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c595171d-b37a-4e1a-8989-4cddcfb23223 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.793732] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed47fc0-d265-4fd4-98cb-cc4549c13ffb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.808374] env[61629]: DEBUG nova.compute.provider_tree [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 933.810171] env[61629]: DEBUG nova.network.neutron [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 933.847974] env[61629]: DEBUG nova.network.neutron [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Port e28dd480-831a-49f0-804e-ad88763d3c24 binding to destination host cpu-1 is already ACTIVE {{(pid=61629) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 934.121035] env[61629]: DEBUG nova.network.neutron [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Updating instance_info_cache with network_info: [{"id": "8a6b1581-a94f-479e-b2c5-2ee0d1cabe06", "address": "fa:16:3e:fe:c3:b4", "network": {"id": "c1b68401-68d1-48c7-b118-722070249876", "bridge": "br-int", "label": "tempest-ServersTestJSON-1738845000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0578ce75c37942d4ba6c8b862ceb7d92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a6b1581-a9", "ovs_interfaceid": "8a6b1581-a94f-479e-b2c5-2ee0d1cabe06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.140291] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]521d90e5-66cb-4c84-f133-e7a14417eb8b, 'name': SearchDatastore_Task, 'duration_secs': 0.009984} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.140398] env[61629]: DEBUG oslo_concurrency.lockutils [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.140605] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 459c5f25-8fb1-4e43-8f7f-359a7ff697f2/459c5f25-8fb1-4e43-8f7f-359a7ff697f2.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 934.140876] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd266178-2ed2-42c6-8cc1-ad1535a4bf0d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.148425] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 934.148425] env[61629]: value = "task-1354370" [ 934.148425] env[61629]: _type = "Task" [ 934.148425] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.156862] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354370, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.211571] env[61629]: DEBUG oslo_vmware.api [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354361, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.313545] env[61629]: DEBUG nova.scheduler.client.report [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 934.343605] env[61629]: DEBUG nova.compute.manager [req-f012843c-5e9a-4659-b18f-ef7f1bdad7d5 req-8654597e-9098-4877-8c1c-c8316dfb49f7 service nova] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Received event network-vif-deleted-7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 934.343873] env[61629]: INFO nova.compute.manager [req-f012843c-5e9a-4659-b18f-ef7f1bdad7d5 req-8654597e-9098-4877-8c1c-c8316dfb49f7 service nova] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Neutron deleted interface 7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3; detaching it from the instance and deleting it from the info cache [ 934.344111] env[61629]: DEBUG nova.network.neutron [req-f012843c-5e9a-4659-b18f-ef7f1bdad7d5 req-8654597e-9098-4877-8c1c-c8316dfb49f7 service nova] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.624392] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Releasing lock "refresh_cache-2ce60374-7baf-4d27-afbd-dcfaf6600a78" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.626215] env[61629]: DEBUG nova.compute.manager [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Instance network_info: |[{"id": "8a6b1581-a94f-479e-b2c5-2ee0d1cabe06", "address": "fa:16:3e:fe:c3:b4", "network": {"id": "c1b68401-68d1-48c7-b118-722070249876", "bridge": "br-int", "label": "tempest-ServersTestJSON-1738845000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0578ce75c37942d4ba6c8b862ceb7d92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a6b1581-a9", "ovs_interfaceid": "8a6b1581-a94f-479e-b2c5-2ee0d1cabe06", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 934.626215] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:c3:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba866c99-1cb2-4588-9f76-4bc0421ed46a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8a6b1581-a94f-479e-b2c5-2ee0d1cabe06', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 934.633119] env[61629]: DEBUG oslo.service.loopingcall [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 934.633404] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 934.633654] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-630c43e8-c80e-484b-a42d-4341d4cb62c3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.654458] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 934.654458] env[61629]: value = "task-1354371" [ 934.654458] env[61629]: _type = "Task" [ 934.654458] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.658346] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354370, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.665471] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354371, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.678381] env[61629]: DEBUG nova.network.neutron [-] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.718195] env[61629]: DEBUG oslo_vmware.api [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354361, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.811107] env[61629]: DEBUG nova.network.neutron [-] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.819499] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.816s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.825162] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.994s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.825474] env[61629]: DEBUG nova.objects.instance [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lazy-loading 'pci_requests' on Instance uuid fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 934.844375] env[61629]: INFO nova.scheduler.client.report [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Deleted allocations for instance 3085a70f-360c-43a3-80d7-e7b87fb3e146 [ 934.850315] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4c3df315-699f-45a0-9422-d4ba35073404 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.857598] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e623f1c9-7ab5-4cba-8abe-b2968ee64cbf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.885337] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "7cf87381-235e-449b-8269-61c2d4033028-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.885604] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "7cf87381-235e-449b-8269-61c2d4033028-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.885786] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock 
"7cf87381-235e-449b-8269-61c2d4033028-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.898487] env[61629]: DEBUG nova.compute.manager [req-f012843c-5e9a-4659-b18f-ef7f1bdad7d5 req-8654597e-9098-4877-8c1c-c8316dfb49f7 service nova] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Detach interface failed, port_id=7ee83b47-4c23-43c0-9d9b-7a95f79e5fe3, reason: Instance 2b01eeae-64be-44b3-b4cf-c2a8490043e3 could not be found. {{(pid=61629) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 934.922674] env[61629]: DEBUG nova.compute.manager [req-53c94fd7-1c22-42ba-9308-d56e01fc3f43 req-5f2714f7-547b-4368-8c32-472134288824 service nova] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Received event network-changed-8a6b1581-a94f-479e-b2c5-2ee0d1cabe06 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 934.922783] env[61629]: DEBUG nova.compute.manager [req-53c94fd7-1c22-42ba-9308-d56e01fc3f43 req-5f2714f7-547b-4368-8c32-472134288824 service nova] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Refreshing instance network info cache due to event network-changed-8a6b1581-a94f-479e-b2c5-2ee0d1cabe06. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 934.922988] env[61629]: DEBUG oslo_concurrency.lockutils [req-53c94fd7-1c22-42ba-9308-d56e01fc3f43 req-5f2714f7-547b-4368-8c32-472134288824 service nova] Acquiring lock "refresh_cache-2ce60374-7baf-4d27-afbd-dcfaf6600a78" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.923153] env[61629]: DEBUG oslo_concurrency.lockutils [req-53c94fd7-1c22-42ba-9308-d56e01fc3f43 req-5f2714f7-547b-4368-8c32-472134288824 service nova] Acquired lock "refresh_cache-2ce60374-7baf-4d27-afbd-dcfaf6600a78" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.923324] env[61629]: DEBUG nova.network.neutron [req-53c94fd7-1c22-42ba-9308-d56e01fc3f43 req-5f2714f7-547b-4368-8c32-472134288824 service nova] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Refreshing network info cache for port 8a6b1581-a94f-479e-b2c5-2ee0d1cabe06 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 935.158735] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354370, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512879} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.161664] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 459c5f25-8fb1-4e43-8f7f-359a7ff697f2/459c5f25-8fb1-4e43-8f7f-359a7ff697f2.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 935.161890] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 935.162157] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-07d8db32-a3b7-4e49-9cc2-43c7f57d6be6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.168394] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354371, 'name': CreateVM_Task, 'duration_secs': 0.358393} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.169376] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 935.169675] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 935.169675] env[61629]: value = "task-1354372" [ 935.169675] env[61629]: _type = "Task" [ 935.169675] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.170289] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.170454] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.170771] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 935.171062] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-707f2840-dedb-424d-998a-6251fc64628b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.177614] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 935.177614] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52dbd887-6dd6-6a51-7085-d93ea4b5cf33" [ 935.177614] env[61629]: _type = "Task" [ 935.177614] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.180423] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354372, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.183416] env[61629]: INFO nova.compute.manager [-] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Took 1.46 seconds to deallocate network for instance. [ 935.191377] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52dbd887-6dd6-6a51-7085-d93ea4b5cf33, 'name': SearchDatastore_Task, 'duration_secs': 0.008944} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.191617] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 935.191840] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 935.192085] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.192242] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.192426] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 935.192901] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca8fa42b-ab49-45d5-beab-7aae4b8e6a80 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.200235] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 935.200419] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 935.201214] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-245585d3-af47-462b-9545-28d24059aef5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.208901] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 935.208901] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5269c0f7-e29d-d73f-1a5b-4f0c86de0182" [ 935.208901] env[61629]: _type = "Task" [ 935.208901] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.212518] env[61629]: DEBUG oslo_vmware.api [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354361, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.220266] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5269c0f7-e29d-d73f-1a5b-4f0c86de0182, 'name': SearchDatastore_Task, 'duration_secs': 0.007799} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.221572] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5511ee35-bb65-49ca-860a-371ae9271e88 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.227518] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 935.227518] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]520ee820-22b2-b5ce-8e68-478058932a05" [ 935.227518] env[61629]: _type = "Task" [ 935.227518] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.234850] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]520ee820-22b2-b5ce-8e68-478058932a05, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.314045] env[61629]: INFO nova.compute.manager [-] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Took 1.58 seconds to deallocate network for instance. 
[ 935.329638] env[61629]: DEBUG nova.objects.instance [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lazy-loading 'numa_topology' on Instance uuid fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 935.354692] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0ed4b5f8-65da-45b0-89a0-4aef61f6264a tempest-ServersTestMultiNic-211152432 tempest-ServersTestMultiNic-211152432-project-member] Lock "3085a70f-360c-43a3-80d7-e7b87fb3e146" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.550s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.572665] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "a08e5762-5307-4dd8-a025-a1cdfd43025e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.572929] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "a08e5762-5307-4dd8-a025-a1cdfd43025e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.682442] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354372, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059672} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.682725] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 935.683520] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb44ece-b28c-4f7d-b2ed-db4c8d5833ff {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.697637] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.706532] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] 459c5f25-8fb1-4e43-8f7f-359a7ff697f2/459c5f25-8fb1-4e43-8f7f-359a7ff697f2.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 935.707606] env[61629]: DEBUG nova.network.neutron [req-53c94fd7-1c22-42ba-9308-d56e01fc3f43 req-5f2714f7-547b-4368-8c32-472134288824 service nova] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Updated VIF entry in instance network info cache for port 8a6b1581-a94f-479e-b2c5-2ee0d1cabe06. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 935.707885] env[61629]: DEBUG nova.network.neutron [req-53c94fd7-1c22-42ba-9308-d56e01fc3f43 req-5f2714f7-547b-4368-8c32-472134288824 service nova] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Updating instance_info_cache with network_info: [{"id": "8a6b1581-a94f-479e-b2c5-2ee0d1cabe06", "address": "fa:16:3e:fe:c3:b4", "network": {"id": "c1b68401-68d1-48c7-b118-722070249876", "bridge": "br-int", "label": "tempest-ServersTestJSON-1738845000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0578ce75c37942d4ba6c8b862ceb7d92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a6b1581-a9", "ovs_interfaceid": "8a6b1581-a94f-479e-b2c5-2ee0d1cabe06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.709754] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-72a12c44-b24a-454e-8bc4-920006ecda08 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.734383] env[61629]: DEBUG oslo_vmware.api [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354361, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.735959] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 935.735959] env[61629]: value = "task-1354373" [ 935.735959] env[61629]: _type = "Task" [ 935.735959] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.743055] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]520ee820-22b2-b5ce-8e68-478058932a05, 'name': SearchDatastore_Task, 'duration_secs': 0.010628} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.743796] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 935.744115] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 2ce60374-7baf-4d27-afbd-dcfaf6600a78/2ce60374-7baf-4d27-afbd-dcfaf6600a78.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 935.744399] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ad47f18-68a4-43e2-9a77-5724a90df1fc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.749040] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354373, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.752845] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 935.752845] env[61629]: value = "task-1354374" [ 935.752845] env[61629]: _type = "Task" [ 935.752845] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.760443] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354374, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.823443] env[61629]: DEBUG oslo_concurrency.lockutils [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.831083] env[61629]: INFO nova.compute.claims [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 935.966585] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "refresh_cache-7cf87381-235e-449b-8269-61c2d4033028" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.966585] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquired lock "refresh_cache-7cf87381-235e-449b-8269-61c2d4033028" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.966585] env[61629]: DEBUG nova.network.neutron [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 936.076664] env[61629]: DEBUG nova.compute.manager [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 936.220905] env[61629]: DEBUG oslo_vmware.api [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354361, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.224956] env[61629]: DEBUG oslo_concurrency.lockutils [req-53c94fd7-1c22-42ba-9308-d56e01fc3f43 req-5f2714f7-547b-4368-8c32-472134288824 service nova] Releasing lock "refresh_cache-2ce60374-7baf-4d27-afbd-dcfaf6600a78" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.225101] env[61629]: DEBUG nova.compute.manager [req-53c94fd7-1c22-42ba-9308-d56e01fc3f43 req-5f2714f7-547b-4368-8c32-472134288824 service nova] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Received event network-vif-deleted-85b39faa-8b58-4b86-b4df-a4b98f2a5325 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 936.245647] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354373, 'name': ReconfigVM_Task, 'duration_secs': 0.444811} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.245946] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Reconfigured VM instance instance-00000058 to attach disk [datastore2] 459c5f25-8fb1-4e43-8f7f-359a7ff697f2/459c5f25-8fb1-4e43-8f7f-359a7ff697f2.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 936.246592] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-74588d06-1071-4755-afab-f51609da3d5a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.253524] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 936.253524] env[61629]: value = "task-1354375" [ 936.253524] env[61629]: _type = "Task" [ 936.253524] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.267332] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354375, 'name': Rename_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.270273] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354374, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.433867} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.270509] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 2ce60374-7baf-4d27-afbd-dcfaf6600a78/2ce60374-7baf-4d27-afbd-dcfaf6600a78.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 936.270720] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 936.270953] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f74c3bba-4798-4010-9b72-1e3d381cef2e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.276439] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 936.276439] env[61629]: value = "task-1354376" [ 936.276439] env[61629]: _type = "Task" [ 936.276439] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.283703] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354376, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.607421] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.725689] env[61629]: DEBUG oslo_vmware.api [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354361, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.763074] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354375, 'name': Rename_Task, 'duration_secs': 0.138009} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.763412] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 936.763625] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-635ce5cf-b694-4660-9236-ce239fb9a5a2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.770502] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 936.770502] env[61629]: value = "task-1354377" [ 936.770502] env[61629]: _type = "Task" [ 936.770502] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.778624] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354377, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.786114] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354376, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06752} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.786404] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 936.787249] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a6b30c0-4ab9-499b-8311-aaa7a9694f42 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.790511] env[61629]: DEBUG nova.network.neutron [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Updating instance_info_cache with network_info: [{"id": "e28dd480-831a-49f0-804e-ad88763d3c24", "address": "fa:16:3e:9f:0b:da", "network": {"id": "534e08bb-ebea-429f-8a3d-733c418ea99b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1143213928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6d1f876ee054beb89ca0eb0776ddcd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape28dd480-83", "ovs_interfaceid": "e28dd480-831a-49f0-804e-ad88763d3c24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.812397] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] 2ce60374-7baf-4d27-afbd-dcfaf6600a78/2ce60374-7baf-4d27-afbd-dcfaf6600a78.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 936.813486] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-850f5de2-44de-4135-bc07-d9ed95682bb7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.835076] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 936.835076] env[61629]: value = "task-1354378" [ 936.835076] env[61629]: _type = "Task" [ 936.835076] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.846581] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354378, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.074648] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06fb01b-1595-4f29-a099-6dc88c2507d5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.082456] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19d36ac7-987a-499f-8fbf-d7961f4d7f7b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.114433] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e99fac-8205-4c93-9163-7bcaa4b42b43 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.123528] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d99f86a-d989-4250-b934-b6c074fc4832 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.143159] env[61629]: DEBUG nova.compute.provider_tree [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 937.223986] env[61629]: DEBUG oslo_vmware.api [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354361, 'name': ReconfigVM_Task, 'duration_secs': 5.753427} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.224296] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Releasing lock "09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.224665] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Reconfigured VM to detach interface {{(pid=61629) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 937.281411] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354377, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.293577] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Releasing lock "refresh_cache-7cf87381-235e-449b-8269-61c2d4033028" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.347830] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354378, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.647566] env[61629]: DEBUG nova.scheduler.client.report [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 937.780615] env[61629]: DEBUG oslo_vmware.api [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354377, 'name': PowerOnVM_Task, 'duration_secs': 0.582362} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.780941] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 937.781090] env[61629]: INFO nova.compute.manager [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Took 9.69 seconds to spawn the instance on the hypervisor. 
[ 937.781278] env[61629]: DEBUG nova.compute.manager [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 937.782051] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d18dd91-4bde-464e-a673-5bf7e84dae79 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.820254] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb3f740-0c74-406e-b8d0-5b4d260d6842 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.844042] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d1775f-9e80-4a19-a45a-75a84bec4f11 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.852371] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354378, 'name': ReconfigVM_Task, 'duration_secs': 0.670225} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.854845] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Reconfigured VM instance instance-00000059 to attach disk [datastore2] 2ce60374-7baf-4d27-afbd-dcfaf6600a78/2ce60374-7baf-4d27-afbd-dcfaf6600a78.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 937.855698] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Updating instance '7cf87381-235e-449b-8269-61c2d4033028' progress to 83 {{(pid=61629) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 937.859623] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5da3a65e-1604-496c-8e21-3025339b2b85 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.866438] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 937.866438] env[61629]: value = "task-1354379" [ 937.866438] env[61629]: _type = "Task" [ 937.866438] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.875424] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354379, 'name': Rename_Task} progress is 5%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.152252] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.327s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.154601] env[61629]: DEBUG oslo_concurrency.lockutils [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.751s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.156184] env[61629]: INFO nova.compute.claims [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 938.192610] env[61629]: INFO nova.network.neutron [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Updating port b8a895f7-ad9d-4d49-8460-de82459d88f7 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 938.308495] env[61629]: INFO nova.compute.manager [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Took 25.52 seconds to build instance. [ 938.364352] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 938.365113] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5fd63bb4-7922-42c8-a6ae-be3105acf01d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.371797] env[61629]: DEBUG oslo_vmware.api [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 938.371797] env[61629]: value = "task-1354380" [ 938.371797] env[61629]: _type = "Task" [ 938.371797] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.377597] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354379, 'name': Rename_Task, 'duration_secs': 0.172207} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.377998] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 938.378710] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a32e37e-fbd1-4526-aa6e-3558aa619a05 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.384445] env[61629]: DEBUG oslo_vmware.api [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354380, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.385616] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 938.385616] env[61629]: value = "task-1354381" [ 938.385616] env[61629]: _type = "Task" [ 938.385616] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.393192] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354381, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.524860] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.524860] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquired lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.524860] env[61629]: DEBUG nova.network.neutron [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 938.813500] env[61629]: DEBUG oslo_concurrency.lockutils [None req-680986a7-c395-4cd1-a2ab-8444012dd3ff tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "459c5f25-8fb1-4e43-8f7f-359a7ff697f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.041s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.888820] env[61629]: DEBUG oslo_vmware.api 
[None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354380, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.904757] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354381, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.340820] env[61629]: INFO nova.network.neutron [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Port d666a690-afc1-4ce0-a878-192338b5dc0d from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 939.342067] env[61629]: DEBUG nova.network.neutron [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Updating instance_info_cache with network_info: [{"id": "91aa1640-3097-4a26-9090-4081740f917d", "address": "fa:16:3e:d4:a8:15", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91aa1640-30", "ovs_interfaceid": "91aa1640-3097-4a26-9090-4081740f917d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.386655] env[61629]: DEBUG oslo_vmware.api [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354380, 'name': PowerOnVM_Task, 'duration_secs': 0.613603} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.387099] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 939.387359] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e1570d-59d1-4350-aab8-4aa61b334dc1 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Updating instance '7cf87381-235e-449b-8269-61c2d4033028' progress to 100 {{(pid=61629) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 939.403661] env[61629]: DEBUG oslo_vmware.api [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354381, 'name': PowerOnVM_Task, 'duration_secs': 0.625578} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.403988] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 939.404767] env[61629]: INFO nova.compute.manager [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Took 7.18 seconds to spawn the instance on the hypervisor. 
[ 939.405053] env[61629]: DEBUG nova.compute.manager [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 939.406312] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37429798-ae11-4787-8d23-f8d5136ab695 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.462204] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d2f37bb-5504-4c28-bf04-8f184dcf364e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.473265] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab1eb99f-63ae-4b05-83bd-ffe751e9d566 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.503737] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a6aec30-f31c-4487-a0f1-a9d42404193a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.511519] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d37d599c-eb9c-4a5c-a5a4-5cfc35e537f7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.528472] env[61629]: DEBUG nova.compute.provider_tree [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 939.847413] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Releasing lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.931024] env[61629]: INFO nova.compute.manager [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Took 26.57 seconds to build instance. 
[ 940.032087] env[61629]: DEBUG nova.scheduler.client.report [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 940.288714] env[61629]: DEBUG nova.compute.manager [req-e76f8fb6-ab91-48b5-a4c8-433c51871551 req-04278a1b-b339-4e0a-ba2e-cce5da9a9e3a service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Received event network-vif-plugged-b8a895f7-ad9d-4d49-8460-de82459d88f7 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 940.288944] env[61629]: DEBUG oslo_concurrency.lockutils [req-e76f8fb6-ab91-48b5-a4c8-433c51871551 req-04278a1b-b339-4e0a-ba2e-cce5da9a9e3a service nova] Acquiring lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.289689] env[61629]: DEBUG oslo_concurrency.lockutils [req-e76f8fb6-ab91-48b5-a4c8-433c51871551 req-04278a1b-b339-4e0a-ba2e-cce5da9a9e3a service nova] Lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.289689] env[61629]: DEBUG oslo_concurrency.lockutils [req-e76f8fb6-ab91-48b5-a4c8-433c51871551 req-04278a1b-b339-4e0a-ba2e-cce5da9a9e3a service nova] Lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.289689] env[61629]: DEBUG nova.compute.manager [req-e76f8fb6-ab91-48b5-a4c8-433c51871551 req-04278a1b-b339-4e0a-ba2e-cce5da9a9e3a service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] No waiting events found dispatching network-vif-plugged-b8a895f7-ad9d-4d49-8460-de82459d88f7 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 940.289689] env[61629]: WARNING nova.compute.manager [req-e76f8fb6-ab91-48b5-a4c8-433c51871551 req-04278a1b-b339-4e0a-ba2e-cce5da9a9e3a service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Received unexpected event network-vif-plugged-b8a895f7-ad9d-4d49-8460-de82459d88f7 for instance with vm_state shelved_offloaded and task_state spawning. 
[ 940.351640] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da099f59-fd1f-43ae-9dac-649a930bc9ee tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "interface-09890839-b1d9-4558-992d-b1a6f4c5f750-d666a690-afc1-4ce0-a878-192338b5dc0d" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.795s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.432788] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c1cfb6ef-cb23-43ab-bf8f-0345c74f82a7 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "2ce60374-7baf-4d27-afbd-dcfaf6600a78" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.088s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.452699] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquiring lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.452699] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquired lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.452699] env[61629]: DEBUG nova.network.neutron [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 940.539972] env[61629]: DEBUG oslo_concurrency.lockutils [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.385s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.541340] env[61629]: DEBUG nova.compute.manager [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 940.546987] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.458s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.547327] env[61629]: DEBUG nova.objects.instance [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Lazy-loading 'resources' on Instance uuid 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 941.050478] env[61629]: DEBUG nova.compute.utils [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 941.057047] env[61629]: DEBUG nova.compute.manager [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 941.057047] env[61629]: DEBUG nova.network.neutron [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 941.098571] env[61629]: DEBUG oslo_concurrency.lockutils [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "2ce60374-7baf-4d27-afbd-dcfaf6600a78" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.098775] env[61629]: DEBUG oslo_concurrency.lockutils [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "2ce60374-7baf-4d27-afbd-dcfaf6600a78" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.098981] env[61629]: DEBUG oslo_concurrency.lockutils [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "2ce60374-7baf-4d27-afbd-dcfaf6600a78-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.099185] env[61629]: DEBUG oslo_concurrency.lockutils [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "2ce60374-7baf-4d27-afbd-dcfaf6600a78-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.099357] env[61629]: DEBUG oslo_concurrency.lockutils [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "2ce60374-7baf-4d27-afbd-dcfaf6600a78-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.107433] env[61629]: INFO nova.compute.manager [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Terminating instance [ 941.112539] env[61629]: DEBUG nova.compute.manager [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 941.113452] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 941.114369] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd5ab8b-36fa-4c12-892b-2314b200b4e2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.126260] env[61629]: DEBUG nova.policy [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b9e56746f39a4997ab1fc0b1672916ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '50082b813d4e403981cc956de8fd841d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 941.133949] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 941.134241] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd5e2fd9-c185-4091-956a-78be781f8492 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.144405] env[61629]: DEBUG oslo_vmware.api [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 941.144405] env[61629]: value = "task-1354382" [ 941.144405] env[61629]: _type = "Task" [ 
941.144405] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.154269] env[61629]: DEBUG oslo_vmware.api [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354382, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.244681] env[61629]: DEBUG nova.network.neutron [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Updating instance_info_cache with network_info: [{"id": "b8a895f7-ad9d-4d49-8460-de82459d88f7", "address": "fa:16:3e:7d:7e:9a", "network": {"id": "7ab21805-1836-4ac0-94d2-d715f9f3352e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1256584900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc5fe81fb0eb4820825cc8e97b8fe4f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8a895f7-ad", "ovs_interfaceid": "b8a895f7-ad9d-4d49-8460-de82459d88f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.412263] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50d8e688-e176-47c5-bd38-adcb85ed3148 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.424546] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e2ae51-0b20-4c87-8132-8ba8743dbc18 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.458351] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e20964a-9943-4599-ba05-ba20e3fbb49b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.466596] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b091bc5e-0ca7-4f77-b013-c1b2fe4cd90c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.482256] env[61629]: DEBUG nova.compute.provider_tree [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Inventory has not changed in ProviderTree for provider: 
d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.557247] env[61629]: DEBUG nova.compute.manager [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 941.656381] env[61629]: DEBUG oslo_vmware.api [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354382, 'name': PowerOffVM_Task, 'duration_secs': 0.335143} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.656381] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 941.656381] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 941.657594] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3cbaf50a-6b19-46ec-986e-a6f5618a48f8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.749508] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Releasing lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.781330] env[61629]: DEBUG nova.network.neutron [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Successfully created port: 734452b5-1029-4186-9c1f-a26f48e3b47f {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 941.793676] env[61629]: DEBUG nova.virt.hardware [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='1252fc90d4a3b7987ce04ecaa067b928',container_format='bare',created_at=2024-10-24T13:01:34Z,direct_url=,disk_format='vmdk',id=1e2172a0-89df-4cab-a61f-a1c2288e9094,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-2065049068-shelved',owner='cc5fe81fb0eb4820825cc8e97b8fe4f2',properties=ImageMetaProps,protected=,size=31668224,status='active',tags=,updated_at=2024-10-24T13:01:51Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 941.794152] env[61629]: DEBUG nova.virt.hardware [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 941.794396] env[61629]: DEBUG nova.virt.hardware [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 941.794674] env[61629]: DEBUG nova.virt.hardware [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 941.794930] env[61629]: DEBUG nova.virt.hardware [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 941.795174] env[61629]: DEBUG nova.virt.hardware [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 941.796096] env[61629]: DEBUG nova.virt.hardware [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 941.796362] env[61629]: DEBUG nova.virt.hardware [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 941.796615] env[61629]: DEBUG nova.virt.hardware [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 941.796854] env[61629]: DEBUG nova.virt.hardware [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 941.797113] env[61629]: DEBUG nova.virt.hardware [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 941.798127] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6ca05f-741a-49fe-8118-f02b783092ca {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.806957] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc50f822-3ade-4c2a-86fb-17e16d9bbe15 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.825714] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:7e:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b8a895f7-ad9d-4d49-8460-de82459d88f7', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 941.835025] env[61629]: DEBUG oslo.service.loopingcall [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 941.835351] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 941.835846] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d39adc58-e43d-47b4-8e91-cb91e5fe941a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.855834] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 941.855834] env[61629]: value = "task-1354384" [ 941.855834] env[61629]: _type = "Task" [ 941.855834] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.864658] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354384, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.917994] env[61629]: DEBUG oslo_concurrency.lockutils [None req-07bba802-d57d-48fe-be33-1f48273ef4b9 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "7cf87381-235e-449b-8269-61c2d4033028" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.918456] env[61629]: DEBUG oslo_concurrency.lockutils [None req-07bba802-d57d-48fe-be33-1f48273ef4b9 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "7cf87381-235e-449b-8269-61c2d4033028" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.918776] env[61629]: DEBUG nova.compute.manager [None req-07bba802-d57d-48fe-be33-1f48273ef4b9 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Going to confirm migration 1 {{(pid=61629) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 941.985701] env[61629]: DEBUG nova.scheduler.client.report [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 942.341946] env[61629]: DEBUG nova.compute.manager [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Received event network-changed-91aa1640-3097-4a26-9090-4081740f917d {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 942.342347] env[61629]: DEBUG nova.compute.manager [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Refreshing instance network info cache due to event network-changed-91aa1640-3097-4a26-9090-4081740f917d. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 942.342646] env[61629]: DEBUG oslo_concurrency.lockutils [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] Acquiring lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.342646] env[61629]: DEBUG oslo_concurrency.lockutils [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] Acquired lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.342933] env[61629]: DEBUG nova.network.neutron [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Refreshing network info cache for port 91aa1640-3097-4a26-9090-4081740f917d {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 942.366742] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354384, 'name': CreateVM_Task, 'duration_secs': 0.360712} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.366908] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 942.367681] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1e2172a0-89df-4cab-a61f-a1c2288e9094" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.367851] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1e2172a0-89df-4cab-a61f-a1c2288e9094" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.368324] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1e2172a0-89df-4cab-a61f-a1c2288e9094" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 942.368658] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-078274ee-d3b9-4ff2-8375-605fa45e762e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.373579] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 942.373579] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]525d1d30-c175-80c5-3bb9-16666888f2ab" [ 942.373579] env[61629]: _type = "Task" [ 942.373579] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.381882] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]525d1d30-c175-80c5-3bb9-16666888f2ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.491238] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.944s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.494416] env[61629]: DEBUG oslo_concurrency.lockutils [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.526s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.494728] env[61629]: DEBUG nova.objects.instance [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Lazy-loading 'resources' on Instance uuid cd165a78-21f9-4fc7-88e5-5ab35047eacc {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 942.503372] env[61629]: DEBUG oslo_concurrency.lockutils [None req-07bba802-d57d-48fe-be33-1f48273ef4b9 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "refresh_cache-7cf87381-235e-449b-8269-61c2d4033028" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.503454] env[61629]: DEBUG oslo_concurrency.lockutils [None req-07bba802-d57d-48fe-be33-1f48273ef4b9 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquired lock "refresh_cache-7cf87381-235e-449b-8269-61c2d4033028" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.503636] env[61629]: DEBUG nova.network.neutron [None req-07bba802-d57d-48fe-be33-1f48273ef4b9 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 942.503969] env[61629]: DEBUG nova.objects.instance [None req-07bba802-d57d-48fe-be33-1f48273ef4b9 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lazy-loading 'info_cache' on Instance uuid 7cf87381-235e-449b-8269-61c2d4033028 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 942.534040] env[61629]: INFO nova.scheduler.client.report [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Deleted allocations for instance 
2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b [ 942.572154] env[61629]: DEBUG nova.compute.manager [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 942.599506] env[61629]: DEBUG nova.virt.hardware [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 942.599818] env[61629]: DEBUG nova.virt.hardware [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 942.600072] env[61629]: DEBUG nova.virt.hardware [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 942.600327] env[61629]: DEBUG nova.virt.hardware [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 942.600497] env[61629]: DEBUG nova.virt.hardware [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 942.600654] env[61629]: DEBUG nova.virt.hardware [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 942.600875] env[61629]: DEBUG nova.virt.hardware [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 942.601169] env[61629]: DEBUG nova.virt.hardware [None 
req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 942.601371] env[61629]: DEBUG nova.virt.hardware [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 942.601546] env[61629]: DEBUG nova.virt.hardware [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 942.601726] env[61629]: DEBUG nova.virt.hardware [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 942.602876] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3be228-891c-4a2e-a81b-fb489c8e0afa {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.612237] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98717855-5e8f-4009-b349-5c3c9b9d2001 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.893375] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1e2172a0-89df-4cab-a61f-a1c2288e9094" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.893658] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Processing image 1e2172a0-89df-4cab-a61f-a1c2288e9094 {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 942.893861] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1e2172a0-89df-4cab-a61f-a1c2288e9094/1e2172a0-89df-4cab-a61f-a1c2288e9094.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.894018] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1e2172a0-89df-4cab-a61f-a1c2288e9094/1e2172a0-89df-4cab-a61f-a1c2288e9094.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
942.894949] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 942.894949] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5043bc67-ec13-438d-8799-5c977d4bfc60 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.916901] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 942.921053] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 942.921053] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2dbd3e5-42b4-444d-b68a-820e674f6ed9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.925627] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 942.925627] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52d5ca18-c204-6b14-f14b-d4e4107a6a47" [ 942.925627] env[61629]: _type = "Task" [ 942.925627] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.936624] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52d5ca18-c204-6b14-f14b-d4e4107a6a47, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.049289] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e7e4d2e9-f6ca-4dd8-9631-c89091c5d0c8 tempest-ServersNegativeTestMultiTenantJSON-967089285 tempest-ServersNegativeTestMultiTenantJSON-967089285-project-member] Lock "2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.835s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.120698] env[61629]: DEBUG nova.network.neutron [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Updated VIF entry in instance network info cache for port 91aa1640-3097-4a26-9090-4081740f917d. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 943.120698] env[61629]: DEBUG nova.network.neutron [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Updating instance_info_cache with network_info: [{"id": "91aa1640-3097-4a26-9090-4081740f917d", "address": "fa:16:3e:d4:a8:15", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91aa1640-30", "ovs_interfaceid": "91aa1640-3097-4a26-9090-4081740f917d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.140504] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 943.140745] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 943.140929] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Deleting the datastore file [datastore2] 2ce60374-7baf-4d27-afbd-dcfaf6600a78 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 943.143748] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1f4f9a7-c681-4064-9b36-37c1bd0e1f2f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.151682] env[61629]: DEBUG oslo_vmware.api [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 943.151682] env[61629]: value = "task-1354385" [ 943.151682] env[61629]: _type = "Task" [ 943.151682] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.163458] env[61629]: DEBUG oslo_vmware.api [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354385, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.262904] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b936a2ed-1734-4dbc-a7e4-c01c39cffce9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.270308] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-227fbae9-5125-4b60-821c-dafb9fd48fe5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.299713] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0336c7c7-0162-4789-962f-44b869686356 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.306792] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f2813c-ba53-4008-b2c2-9a0d26ce2159 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.321133] env[61629]: DEBUG nova.compute.provider_tree [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.440827] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Preparing fetch location {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 943.441296] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Fetch image to [datastore1] OSTACK_IMG_a92e522e-69e8-4234-b3a9-4b1412dec328/OSTACK_IMG_a92e522e-69e8-4234-b3a9-4b1412dec328.vmdk {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 943.441623] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Downloading stream optimized image 1e2172a0-89df-4cab-a61f-a1c2288e9094 to [datastore1] OSTACK_IMG_a92e522e-69e8-4234-b3a9-4b1412dec328/OSTACK_IMG_a92e522e-69e8-4234-b3a9-4b1412dec328.vmdk on the data store datastore1 as vApp {{(pid=61629) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 943.441940] env[61629]: DEBUG nova.virt.vmwareapi.images [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 
tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Downloading image file data 1e2172a0-89df-4cab-a61f-a1c2288e9094 to the ESX as VM named 'OSTACK_IMG_a92e522e-69e8-4234-b3a9-4b1412dec328' {{(pid=61629) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 943.545052] env[61629]: DEBUG oslo_vmware.rw_handles [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 943.545052] env[61629]: value = "resgroup-9" [ 943.545052] env[61629]: _type = "ResourcePool" [ 943.545052] env[61629]: }. {{(pid=61629) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 943.545052] env[61629]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-3b98bd5a-3591-4a96-be66-421b70ba8bcf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.570380] env[61629]: DEBUG oslo_vmware.rw_handles [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lease: (returnval){ [ 943.570380] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52733f1d-2487-8a3e-c304-76450f8a578b" [ 943.570380] env[61629]: _type = "HttpNfcLease" [ 943.570380] env[61629]: } obtained for vApp import into resource pool (val){ [ 943.570380] env[61629]: value = "resgroup-9" [ 943.570380] env[61629]: _type = "ResourcePool" [ 943.570380] env[61629]: }. {{(pid=61629) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 943.570380] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the lease: (returnval){ [ 943.570380] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52733f1d-2487-8a3e-c304-76450f8a578b" [ 943.570380] env[61629]: _type = "HttpNfcLease" [ 943.570380] env[61629]: } to be ready. {{(pid=61629) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 943.575638] env[61629]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 943.575638] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52733f1d-2487-8a3e-c304-76450f8a578b" [ 943.575638] env[61629]: _type = "HttpNfcLease" [ 943.575638] env[61629]: } is initializing. 
{{(pid=61629) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 943.622944] env[61629]: DEBUG oslo_concurrency.lockutils [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] Releasing lock "refresh_cache-09890839-b1d9-4558-992d-b1a6f4c5f750" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.623302] env[61629]: DEBUG nova.compute.manager [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Received event network-changed-a193ab2f-5a9d-4411-94f9-cc5834b60795 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 943.623558] env[61629]: DEBUG nova.compute.manager [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Refreshing instance network info cache due to event network-changed-a193ab2f-5a9d-4411-94f9-cc5834b60795. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 943.623845] env[61629]: DEBUG oslo_concurrency.lockutils [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] Acquiring lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.624143] env[61629]: DEBUG oslo_concurrency.lockutils [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] Acquired lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.624277] env[61629]: DEBUG nova.network.neutron [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Refreshing network info cache for port a193ab2f-5a9d-4411-94f9-cc5834b60795 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 943.666042] env[61629]: DEBUG oslo_vmware.api [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354385, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.303685} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.666540] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 943.666953] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 943.667352] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 943.667987] env[61629]: INFO nova.compute.manager [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Took 2.55 seconds to destroy the instance on the hypervisor. [ 943.668360] env[61629]: DEBUG oslo.service.loopingcall [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 943.668968] env[61629]: DEBUG nova.compute.manager [-] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 943.669219] env[61629]: DEBUG nova.network.neutron [-] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 943.694513] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "interface-7c3e9d0f-88a8-41fe-bf61-e3db34d36928-d666a690-afc1-4ce0-a878-192338b5dc0d" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.694680] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "interface-7c3e9d0f-88a8-41fe-bf61-e3db34d36928-d666a690-afc1-4ce0-a878-192338b5dc0d" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.694989] env[61629]: DEBUG nova.objects.instance [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lazy-loading 'flavor' on Instance uuid 7c3e9d0f-88a8-41fe-bf61-e3db34d36928 
{{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 943.725959] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Acquiring lock "87172592-f557-467f-ace2-805fd822681d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.727058] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Lock "87172592-f557-467f-ace2-805fd822681d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.777228] env[61629]: DEBUG nova.network.neutron [None req-07bba802-d57d-48fe-be33-1f48273ef4b9 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Updating instance_info_cache with network_info: [{"id": "e28dd480-831a-49f0-804e-ad88763d3c24", "address": "fa:16:3e:9f:0b:da", "network": {"id": "534e08bb-ebea-429f-8a3d-733c418ea99b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1143213928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6d1f876ee054beb89ca0eb0776ddcd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape28dd480-83", "ovs_interfaceid": "e28dd480-831a-49f0-804e-ad88763d3c24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.823701] env[61629]: DEBUG nova.scheduler.client.report [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 943.937308] env[61629]: DEBUG nova.network.neutron [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: 
ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Successfully updated port: 734452b5-1029-4186-9c1f-a26f48e3b47f {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 943.951986] env[61629]: DEBUG nova.compute.manager [req-40b1405f-358f-40bd-9a30-ce78802b44c0 req-26a6b412-ff4e-42d4-86d8-25eec12ea094 service nova] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Received event network-vif-plugged-734452b5-1029-4186-9c1f-a26f48e3b47f {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 943.952947] env[61629]: DEBUG oslo_concurrency.lockutils [req-40b1405f-358f-40bd-9a30-ce78802b44c0 req-26a6b412-ff4e-42d4-86d8-25eec12ea094 service nova] Acquiring lock "ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.953288] env[61629]: DEBUG oslo_concurrency.lockutils [req-40b1405f-358f-40bd-9a30-ce78802b44c0 req-26a6b412-ff4e-42d4-86d8-25eec12ea094 service nova] Lock "ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.953526] env[61629]: DEBUG oslo_concurrency.lockutils [req-40b1405f-358f-40bd-9a30-ce78802b44c0 req-26a6b412-ff4e-42d4-86d8-25eec12ea094 service nova] Lock "ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.953841] env[61629]: DEBUG nova.compute.manager [req-40b1405f-358f-40bd-9a30-ce78802b44c0 req-26a6b412-ff4e-42d4-86d8-25eec12ea094 service nova] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] No waiting events found dispatching network-vif-plugged-734452b5-1029-4186-9c1f-a26f48e3b47f {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 943.954090] env[61629]: WARNING nova.compute.manager [req-40b1405f-358f-40bd-9a30-ce78802b44c0 req-26a6b412-ff4e-42d4-86d8-25eec12ea094 service nova] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Received unexpected event network-vif-plugged-734452b5-1029-4186-9c1f-a26f48e3b47f for instance with vm_state building and task_state spawning. [ 944.081242] env[61629]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 944.081242] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52733f1d-2487-8a3e-c304-76450f8a578b" [ 944.081242] env[61629]: _type = "HttpNfcLease" [ 944.081242] env[61629]: } is ready. {{(pid=61629) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 944.081656] env[61629]: DEBUG oslo_vmware.rw_handles [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 944.081656] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52733f1d-2487-8a3e-c304-76450f8a578b" [ 944.081656] env[61629]: _type = "HttpNfcLease" [ 944.081656] env[61629]: }. 
{{(pid=61629) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 944.082523] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d11c91-5694-4b1c-986e-9087b3e49a59 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.092340] env[61629]: DEBUG oslo_vmware.rw_handles [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522c04e4-41cd-d74b-a8ea-924fc088e519/disk-0.vmdk from lease info. {{(pid=61629) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 944.092660] env[61629]: DEBUG oslo_vmware.rw_handles [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Creating HTTP connection to write to file with size = 31668224 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522c04e4-41cd-d74b-a8ea-924fc088e519/disk-0.vmdk. {{(pid=61629) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 944.165234] env[61629]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f0144408-7016-4a79-a953-d3fe71620f93 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.229450] env[61629]: DEBUG nova.compute.manager [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Starting instance... 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 944.279872] env[61629]: DEBUG oslo_concurrency.lockutils [None req-07bba802-d57d-48fe-be33-1f48273ef4b9 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Releasing lock "refresh_cache-7cf87381-235e-449b-8269-61c2d4033028" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.280184] env[61629]: DEBUG nova.objects.instance [None req-07bba802-d57d-48fe-be33-1f48273ef4b9 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lazy-loading 'migration_context' on Instance uuid 7cf87381-235e-449b-8269-61c2d4033028 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 944.332777] env[61629]: DEBUG oslo_concurrency.lockutils [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.836s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.333253] env[61629]: DEBUG oslo_concurrency.lockutils [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.298s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.336188] env[61629]: INFO nova.compute.claims [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 944.367561] env[61629]: INFO nova.scheduler.client.report [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Deleted allocations for instance cd165a78-21f9-4fc7-88e5-5ab35047eacc [ 944.407456] env[61629]: DEBUG nova.objects.instance [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lazy-loading 'pci_requests' on Instance uuid 7c3e9d0f-88a8-41fe-bf61-e3db34d36928 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 944.448193] env[61629]: DEBUG oslo_concurrency.lockutils [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Acquiring lock "refresh_cache-ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.448193] env[61629]: DEBUG oslo_concurrency.lockutils [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Acquired lock "refresh_cache-ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.448193] env[61629]: DEBUG nova.network.neutron [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 
tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 944.568325] env[61629]: DEBUG nova.network.neutron [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Updated VIF entry in instance network info cache for port a193ab2f-5a9d-4411-94f9-cc5834b60795. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 944.568705] env[61629]: DEBUG nova.network.neutron [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Updating instance_info_cache with network_info: [{"id": "a193ab2f-5a9d-4411-94f9-cc5834b60795", "address": "fa:16:3e:f8:02:ef", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa193ab2f-5a", "ovs_interfaceid": "a193ab2f-5a9d-4411-94f9-cc5834b60795", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.579807] env[61629]: DEBUG nova.network.neutron [-] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.764369] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.783146] env[61629]: DEBUG nova.objects.base [None req-07bba802-d57d-48fe-be33-1f48273ef4b9 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Object Instance<7cf87381-235e-449b-8269-61c2d4033028> lazy-loaded attributes: info_cache,migration_context {{(pid=61629) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 944.784101] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c8b43a-6cd1-4881-8db5-fb681eae964f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.807434] env[61629]: DEBUG 
oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b18c612e-e4c5-44b3-86c1-2614f8c121a3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.814115] env[61629]: DEBUG oslo_vmware.api [None req-07bba802-d57d-48fe-be33-1f48273ef4b9 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 944.814115] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52d86efd-759c-6291-5e1a-022fe23eb9c2" [ 944.814115] env[61629]: _type = "Task" [ 944.814115] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.823331] env[61629]: DEBUG oslo_vmware.api [None req-07bba802-d57d-48fe-be33-1f48273ef4b9 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52d86efd-759c-6291-5e1a-022fe23eb9c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.876314] env[61629]: DEBUG oslo_concurrency.lockutils [None req-67115c72-4292-49f5-ad58-34ea0fb806d9 tempest-TenantUsagesTestJSON-1345049248 tempest-TenantUsagesTestJSON-1345049248-project-member] Lock "cd165a78-21f9-4fc7-88e5-5ab35047eacc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.042s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.912280] env[61629]: DEBUG nova.objects.base [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Object Instance<7c3e9d0f-88a8-41fe-bf61-e3db34d36928> lazy-loaded attributes: flavor,pci_requests {{(pid=61629) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 944.912451] env[61629]: DEBUG nova.network.neutron [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 945.064503] env[61629]: DEBUG nova.network.neutron [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 945.071648] env[61629]: DEBUG oslo_concurrency.lockutils [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] Releasing lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.071956] env[61629]: DEBUG nova.compute.manager [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Received event network-changed-b8a895f7-ad9d-4d49-8460-de82459d88f7 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 945.072173] env[61629]: DEBUG nova.compute.manager [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Refreshing instance network info cache due to event network-changed-b8a895f7-ad9d-4d49-8460-de82459d88f7. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 945.072422] env[61629]: DEBUG oslo_concurrency.lockutils [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] Acquiring lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.072565] env[61629]: DEBUG oslo_concurrency.lockutils [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] Acquired lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.072728] env[61629]: DEBUG nova.network.neutron [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Refreshing network info cache for port b8a895f7-ad9d-4d49-8460-de82459d88f7 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 945.083284] env[61629]: INFO nova.compute.manager [-] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Took 1.41 seconds to deallocate network for instance. [ 945.164018] env[61629]: DEBUG nova.policy [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38cc8b6343d54d30a3f6f13512d23020', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e7fced3a50d4821b42cf087d8111cb7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 945.287659] env[61629]: DEBUG oslo_vmware.rw_handles [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Completed reading data from the image iterator. 
{{(pid=61629) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 945.288802] env[61629]: DEBUG oslo_vmware.rw_handles [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522c04e4-41cd-d74b-a8ea-924fc088e519/disk-0.vmdk. {{(pid=61629) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 945.288926] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b454d6bf-5848-4a61-b13b-e5d5fb0cee73 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.300331] env[61629]: DEBUG oslo_vmware.rw_handles [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522c04e4-41cd-d74b-a8ea-924fc088e519/disk-0.vmdk is in state: ready. {{(pid=61629) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 945.300552] env[61629]: DEBUG oslo_vmware.rw_handles [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522c04e4-41cd-d74b-a8ea-924fc088e519/disk-0.vmdk. {{(pid=61629) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 945.300810] env[61629]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-ed5c6619-633b-4688-a056-47af7874c41b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.329866] env[61629]: DEBUG oslo_vmware.api [None req-07bba802-d57d-48fe-be33-1f48273ef4b9 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52d86efd-759c-6291-5e1a-022fe23eb9c2, 'name': SearchDatastore_Task, 'duration_secs': 0.00916} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.330267] env[61629]: DEBUG oslo_concurrency.lockutils [None req-07bba802-d57d-48fe-be33-1f48273ef4b9 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.392759] env[61629]: DEBUG nova.network.neutron [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Updating instance_info_cache with network_info: [{"id": "734452b5-1029-4186-9c1f-a26f48e3b47f", "address": "fa:16:3e:d7:4b:ac", "network": {"id": "4262cd54-88ef-46de-a69c-b6c6392820da", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-520655156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50082b813d4e403981cc956de8fd841d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap734452b5-10", "ovs_interfaceid": "734452b5-1029-4186-9c1f-a26f48e3b47f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.501057] env[61629]: DEBUG oslo_vmware.rw_handles [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522c04e4-41cd-d74b-a8ea-924fc088e519/disk-0.vmdk. 
{{(pid=61629) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 945.501351] env[61629]: INFO nova.virt.vmwareapi.images [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Downloaded image file data 1e2172a0-89df-4cab-a61f-a1c2288e9094 [ 945.505100] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a30299d1-f995-4d46-9f81-98d907bbd197 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.526416] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e8e205e-2e7e-4e7d-b332-7975d9589bd2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.556862] env[61629]: INFO nova.virt.vmwareapi.images [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] The imported VM was unregistered [ 945.562189] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Caching image {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 945.563615] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Creating directory with path [datastore1] devstack-image-cache_base/1e2172a0-89df-4cab-a61f-a1c2288e9094 {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 945.566821] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-287eca16-2578-4518-9e6b-66648dcd564e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.590384] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Created directory with path [datastore1] devstack-image-cache_base/1e2172a0-89df-4cab-a61f-a1c2288e9094 {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 945.590384] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_a92e522e-69e8-4234-b3a9-4b1412dec328/OSTACK_IMG_a92e522e-69e8-4234-b3a9-4b1412dec328.vmdk to [datastore1] devstack-image-cache_base/1e2172a0-89df-4cab-a61f-a1c2288e9094/1e2172a0-89df-4cab-a61f-a1c2288e9094.vmdk. 
{{(pid=61629) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 945.591315] env[61629]: DEBUG oslo_concurrency.lockutils [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.591950] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-655ea08c-7c0f-4a35-86b8-1bb9c6bbd5f6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.599800] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 945.599800] env[61629]: value = "task-1354388" [ 945.599800] env[61629]: _type = "Task" [ 945.599800] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.609583] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354388, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.658048] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e30f96-764c-403c-8f46-b782a6fa3b03 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.665817] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea71515e-bc3b-4164-9cca-e2a38219dff9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.697526] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13295047-39df-4c94-bb5b-ba4ca1c17c9e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.709704] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3420be-caef-48ee-b021-f586970cb04a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.731822] env[61629]: DEBUG nova.compute.provider_tree [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.900563] env[61629]: DEBUG nova.network.neutron [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Updated VIF entry in instance network info cache for port b8a895f7-ad9d-4d49-8460-de82459d88f7. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 945.900969] env[61629]: DEBUG nova.network.neutron [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Updating instance_info_cache with network_info: [{"id": "b8a895f7-ad9d-4d49-8460-de82459d88f7", "address": "fa:16:3e:7d:7e:9a", "network": {"id": "7ab21805-1836-4ac0-94d2-d715f9f3352e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1256584900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc5fe81fb0eb4820825cc8e97b8fe4f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8a895f7-ad", "ovs_interfaceid": "b8a895f7-ad9d-4d49-8460-de82459d88f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.904660] env[61629]: DEBUG oslo_concurrency.lockutils [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Releasing lock "refresh_cache-ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.904660] env[61629]: DEBUG nova.compute.manager [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Instance network_info: |[{"id": "734452b5-1029-4186-9c1f-a26f48e3b47f", "address": "fa:16:3e:d7:4b:ac", "network": {"id": "4262cd54-88ef-46de-a69c-b6c6392820da", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-520655156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50082b813d4e403981cc956de8fd841d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap734452b5-10", "ovs_interfaceid": "734452b5-1029-4186-9c1f-a26f48e3b47f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 945.905081] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:4b:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6966f473-59ac-49bb-9b7a-22c61f4e61e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '734452b5-1029-4186-9c1f-a26f48e3b47f', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 945.913426] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Creating folder: Project (50082b813d4e403981cc956de8fd841d). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 945.914381] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e86b69d-4db5-496b-a885-9ddaaefcaf6c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.928161] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Created folder: Project (50082b813d4e403981cc956de8fd841d) in parent group-v288443. [ 945.928387] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Creating folder: Instances. Parent ref: group-v288550. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 945.928703] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-751592e2-a577-47e5-ae05-67e6aadc5ee8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.938537] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Created folder: Instances in parent group-v288550. [ 945.938932] env[61629]: DEBUG oslo.service.loopingcall [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 945.939016] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 945.939248] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0ba5341c-df48-4199-bb50-780506e5877d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.959689] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 945.959689] env[61629]: value = "task-1354391" [ 945.959689] env[61629]: _type = "Task" [ 945.959689] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.967968] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354391, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.991851] env[61629]: DEBUG nova.compute.manager [req-f296e58d-bbcc-4e1e-900a-89c765a2b580 req-995c9202-6462-450e-9c8d-41d3de11fea5 service nova] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Received event network-changed-734452b5-1029-4186-9c1f-a26f48e3b47f {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 945.991851] env[61629]: DEBUG nova.compute.manager [req-f296e58d-bbcc-4e1e-900a-89c765a2b580 req-995c9202-6462-450e-9c8d-41d3de11fea5 service nova] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Refreshing instance network info cache due to event network-changed-734452b5-1029-4186-9c1f-a26f48e3b47f. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 945.991997] env[61629]: DEBUG oslo_concurrency.lockutils [req-f296e58d-bbcc-4e1e-900a-89c765a2b580 req-995c9202-6462-450e-9c8d-41d3de11fea5 service nova] Acquiring lock "refresh_cache-ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.992051] env[61629]: DEBUG oslo_concurrency.lockutils [req-f296e58d-bbcc-4e1e-900a-89c765a2b580 req-995c9202-6462-450e-9c8d-41d3de11fea5 service nova] Acquired lock "refresh_cache-ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.992257] env[61629]: DEBUG nova.network.neutron [req-f296e58d-bbcc-4e1e-900a-89c765a2b580 req-995c9202-6462-450e-9c8d-41d3de11fea5 service nova] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Refreshing network info cache for port 734452b5-1029-4186-9c1f-a26f48e3b47f {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 946.101863] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cd65234e-edc6-4a93-8cc0-c2c6b284bfea tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquiring lock "274e3437-eacd-4299-9c27-97bbb0ebf1c1" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.102238] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cd65234e-edc6-4a93-8cc0-c2c6b284bfea tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock 
"274e3437-eacd-4299-9c27-97bbb0ebf1c1" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.126538] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354388, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.235705] env[61629]: DEBUG nova.scheduler.client.report [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 946.404985] env[61629]: DEBUG oslo_concurrency.lockutils [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] Releasing lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.404985] env[61629]: DEBUG nova.compute.manager [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Received event network-changed-c827ba81-d74a-4ff3-bfc2-81b5e09c683c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 946.405148] env[61629]: DEBUG nova.compute.manager [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Refreshing instance network info cache due to event network-changed-c827ba81-d74a-4ff3-bfc2-81b5e09c683c. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 946.405389] env[61629]: DEBUG oslo_concurrency.lockutils [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] Acquiring lock "refresh_cache-459c5f25-8fb1-4e43-8f7f-359a7ff697f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 946.405489] env[61629]: DEBUG oslo_concurrency.lockutils [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] Acquired lock "refresh_cache-459c5f25-8fb1-4e43-8f7f-359a7ff697f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.405661] env[61629]: DEBUG nova.network.neutron [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Refreshing network info cache for port c827ba81-d74a-4ff3-bfc2-81b5e09c683c {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 946.472764] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354391, 'name': CreateVM_Task} progress is 25%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.614725] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354388, 'name': MoveVirtualDisk_Task} progress is 26%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.619895] env[61629]: DEBUG nova.compute.utils [None req-cd65234e-edc6-4a93-8cc0-c2c6b284bfea tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 946.741802] env[61629]: DEBUG oslo_concurrency.lockutils [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.408s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.743555] env[61629]: DEBUG nova.compute.manager [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 946.747829] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.050s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.750098] env[61629]: DEBUG nova.objects.instance [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lazy-loading 'resources' on Instance uuid 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.952193] env[61629]: DEBUG nova.network.neutron [req-f296e58d-bbcc-4e1e-900a-89c765a2b580 req-995c9202-6462-450e-9c8d-41d3de11fea5 service nova] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Updated VIF entry in instance network info cache for port 734452b5-1029-4186-9c1f-a26f48e3b47f. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 946.952490] env[61629]: DEBUG nova.network.neutron [req-f296e58d-bbcc-4e1e-900a-89c765a2b580 req-995c9202-6462-450e-9c8d-41d3de11fea5 service nova] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Updating instance_info_cache with network_info: [{"id": "734452b5-1029-4186-9c1f-a26f48e3b47f", "address": "fa:16:3e:d7:4b:ac", "network": {"id": "4262cd54-88ef-46de-a69c-b6c6392820da", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-520655156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50082b813d4e403981cc956de8fd841d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6966f473-59ac-49bb-9b7a-22c61f4e61e2", "external-id": "nsx-vlan-transportzone-668", "segmentation_id": 668, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap734452b5-10", "ovs_interfaceid": "734452b5-1029-4186-9c1f-a26f48e3b47f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.982793] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354391, 'name': CreateVM_Task} progress is 25%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.113629] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354388, 'name': MoveVirtualDisk_Task} progress is 46%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.123604] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cd65234e-edc6-4a93-8cc0-c2c6b284bfea tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "274e3437-eacd-4299-9c27-97bbb0ebf1c1" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.021s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.164534] env[61629]: DEBUG nova.network.neutron [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Updated VIF entry in instance network info cache for port c827ba81-d74a-4ff3-bfc2-81b5e09c683c. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 947.165609] env[61629]: DEBUG nova.network.neutron [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Updating instance_info_cache with network_info: [{"id": "c827ba81-d74a-4ff3-bfc2-81b5e09c683c", "address": "fa:16:3e:6d:96:2d", "network": {"id": "a1fb78c4-7c5c-4692-86e0-3111b87b44c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1355821875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87909880104e4519b42cb204f366af3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc827ba81-d7", "ovs_interfaceid": "c827ba81-d74a-4ff3-bfc2-81b5e09c683c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.254329] env[61629]: DEBUG nova.compute.utils [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 947.259424] env[61629]: DEBUG nova.compute.manager [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 947.259675] env[61629]: DEBUG nova.network.neutron [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 947.304936] env[61629]: DEBUG nova.policy [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '23e2354567b747cab5a15764026e21f6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd318d29ec50427eb997c83837120c9c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 947.457709] env[61629]: DEBUG oslo_concurrency.lockutils [req-f296e58d-bbcc-4e1e-900a-89c765a2b580 req-995c9202-6462-450e-9c8d-41d3de11fea5 service nova] Releasing lock "refresh_cache-ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.458023] env[61629]: DEBUG nova.compute.manager [req-f296e58d-bbcc-4e1e-900a-89c765a2b580 req-995c9202-6462-450e-9c8d-41d3de11fea5 service nova] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Received event network-vif-deleted-8a6b1581-a94f-479e-b2c5-2ee0d1cabe06 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 947.481680] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354391, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.529956] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6218c41e-98b0-41cc-87d7-a67294464cf6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.541128] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf8ad6c0-e6c7-4c30-9a5b-084e96d5d9c0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.583493] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dde5f4f-c8b8-407e-81be-a0dfc38395b6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.588669] env[61629]: DEBUG nova.network.neutron [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Successfully updated port: d666a690-afc1-4ce0-a878-192338b5dc0d {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 947.593916] env[61629]: DEBUG nova.network.neutron [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Successfully created port: ed466750-9924-4d59-a6a8-bc0bdb462dfe {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 947.597412] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf000e0-4781-41ca-8137-0b538e1444cd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.627777] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354388, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.628455] env[61629]: DEBUG nova.compute.provider_tree [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 947.668371] env[61629]: DEBUG oslo_concurrency.lockutils [req-2c8820b2-89b3-4b9e-ac74-6d154d21f2f1 req-7e72c35f-9abe-49bb-b442-2760fb4ea71c service nova] Releasing lock "refresh_cache-459c5f25-8fb1-4e43-8f7f-359a7ff697f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.759808] env[61629]: DEBUG nova.compute.manager [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 947.977822] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354391, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.102137] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.102137] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquired lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.102137] env[61629]: DEBUG nova.network.neutron [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 948.116967] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354388, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.131860] env[61629]: DEBUG nova.scheduler.client.report [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 948.202654] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cd65234e-edc6-4a93-8cc0-c2c6b284bfea tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquiring lock "274e3437-eacd-4299-9c27-97bbb0ebf1c1" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.202943] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cd65234e-edc6-4a93-8cc0-c2c6b284bfea tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "274e3437-eacd-4299-9c27-97bbb0ebf1c1" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.203213] env[61629]: INFO nova.compute.manager [None req-cd65234e-edc6-4a93-8cc0-c2c6b284bfea tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Attaching volume 
d653648b-258a-40e8-bd4a-ee2981909fd0 to /dev/sdb [ 948.254765] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf51414-b80b-4bd8-b712-9c0ee4c58012 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.275229] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e980d1-f290-485e-866a-daa4f60e9c4c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.296687] env[61629]: DEBUG nova.virt.block_device [None req-cd65234e-edc6-4a93-8cc0-c2c6b284bfea tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Updating existing volume attachment record: b66f3df8-20f2-41e0-bad9-e5e9a13aedc4 {{(pid=61629) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 948.477408] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354391, 'name': CreateVM_Task, 'duration_secs': 2.195673} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.477684] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 948.478673] env[61629]: DEBUG oslo_concurrency.lockutils [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.478955] env[61629]: DEBUG oslo_concurrency.lockutils [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.479403] env[61629]: DEBUG oslo_concurrency.lockutils [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 948.479682] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a3db99d-0bf8-4e1f-8315-cd2743372055 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.484430] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Waiting for the task: (returnval){ [ 948.484430] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52688ac9-0900-c70d-3c4d-c9d4561f1380" [ 948.484430] env[61629]: _type = "Task" [ 948.484430] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.492862] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52688ac9-0900-c70d-3c4d-c9d4561f1380, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.618813] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354388, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.639080] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.891s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.642071] env[61629]: DEBUG oslo_concurrency.lockutils [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.819s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.642327] env[61629]: DEBUG nova.objects.instance [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lazy-loading 'resources' on Instance uuid 2b01eeae-64be-44b3-b4cf-c2a8490043e3 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 948.665281] env[61629]: WARNING nova.network.neutron [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] 74993df5-f495-415f-bb5a-87983f0b2da1 already exists in list: networks containing: ['74993df5-f495-415f-bb5a-87983f0b2da1']. ignoring it [ 948.675414] env[61629]: INFO nova.scheduler.client.report [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Deleted allocations for instance 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66 [ 948.776426] env[61629]: DEBUG nova.compute.manager [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 948.812802] env[61629]: DEBUG nova.virt.hardware [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 948.813091] env[61629]: DEBUG nova.virt.hardware [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 948.813482] env[61629]: DEBUG nova.virt.hardware [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 948.813723] env[61629]: DEBUG nova.virt.hardware [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 948.813900] env[61629]: DEBUG nova.virt.hardware [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 948.814067] env[61629]: DEBUG nova.virt.hardware [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 948.814286] env[61629]: DEBUG nova.virt.hardware [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 948.814446] env[61629]: DEBUG nova.virt.hardware [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 948.816036] env[61629]: DEBUG nova.virt.hardware [None 
req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 948.816036] env[61629]: DEBUG nova.virt.hardware [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 948.816036] env[61629]: DEBUG nova.virt.hardware [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 948.816288] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca81b571-9a4d-4490-961e-13dbad93afe0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.824552] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cab54b4-09e2-4561-b145-3394b0268acb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.941219] env[61629]: DEBUG nova.compute.manager [req-f47af008-8607-4834-8db7-01259581528f req-1549bf23-60e4-4c59-82a7-505f956c116c service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Received event network-vif-plugged-d666a690-afc1-4ce0-a878-192338b5dc0d {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 948.941532] env[61629]: DEBUG oslo_concurrency.lockutils [req-f47af008-8607-4834-8db7-01259581528f req-1549bf23-60e4-4c59-82a7-505f956c116c service nova] Acquiring lock "7c3e9d0f-88a8-41fe-bf61-e3db34d36928-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.941790] env[61629]: DEBUG oslo_concurrency.lockutils [req-f47af008-8607-4834-8db7-01259581528f req-1549bf23-60e4-4c59-82a7-505f956c116c service nova] Lock "7c3e9d0f-88a8-41fe-bf61-e3db34d36928-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.941993] env[61629]: DEBUG oslo_concurrency.lockutils [req-f47af008-8607-4834-8db7-01259581528f req-1549bf23-60e4-4c59-82a7-505f956c116c service nova] Lock "7c3e9d0f-88a8-41fe-bf61-e3db34d36928-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.942213] env[61629]: DEBUG nova.compute.manager [req-f47af008-8607-4834-8db7-01259581528f req-1549bf23-60e4-4c59-82a7-505f956c116c service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] No waiting events found dispatching network-vif-plugged-d666a690-afc1-4ce0-a878-192338b5dc0d {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 948.942419] env[61629]: WARNING nova.compute.manager [req-f47af008-8607-4834-8db7-01259581528f 
req-1549bf23-60e4-4c59-82a7-505f956c116c service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Received unexpected event network-vif-plugged-d666a690-afc1-4ce0-a878-192338b5dc0d for instance with vm_state active and task_state None. [ 948.942631] env[61629]: DEBUG nova.compute.manager [req-f47af008-8607-4834-8db7-01259581528f req-1549bf23-60e4-4c59-82a7-505f956c116c service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Received event network-changed-d666a690-afc1-4ce0-a878-192338b5dc0d {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 948.942826] env[61629]: DEBUG nova.compute.manager [req-f47af008-8607-4834-8db7-01259581528f req-1549bf23-60e4-4c59-82a7-505f956c116c service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Refreshing instance network info cache due to event network-changed-d666a690-afc1-4ce0-a878-192338b5dc0d. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 948.943115] env[61629]: DEBUG oslo_concurrency.lockutils [req-f47af008-8607-4834-8db7-01259581528f req-1549bf23-60e4-4c59-82a7-505f956c116c service nova] Acquiring lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.996509] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52688ac9-0900-c70d-3c4d-c9d4561f1380, 'name': SearchDatastore_Task, 'duration_secs': 0.080671} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.996838] env[61629]: DEBUG oslo_concurrency.lockutils [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.998035] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 948.998035] env[61629]: DEBUG oslo_concurrency.lockutils [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.998035] env[61629]: DEBUG oslo_concurrency.lockutils [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.998035] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 948.998035] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5625d5d-1c7c-4ed7-a680-d19cd1a3de73 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.019790] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 949.020017] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 949.020790] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2da76084-6105-4f42-ab32-264d3a437b5d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.026961] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Waiting for the task: (returnval){ [ 949.026961] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52b597bd-4bd1-9266-cf1a-0cc28393a927" [ 949.026961] env[61629]: _type = "Task" [ 949.026961] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.043839] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52b597bd-4bd1-9266-cf1a-0cc28393a927, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.125347] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354388, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.191705} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.125710] env[61629]: INFO nova.virt.vmwareapi.ds_util [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_a92e522e-69e8-4234-b3a9-4b1412dec328/OSTACK_IMG_a92e522e-69e8-4234-b3a9-4b1412dec328.vmdk to [datastore1] devstack-image-cache_base/1e2172a0-89df-4cab-a61f-a1c2288e9094/1e2172a0-89df-4cab-a61f-a1c2288e9094.vmdk. 
[ 949.126010] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Cleaning up location [datastore1] OSTACK_IMG_a92e522e-69e8-4234-b3a9-4b1412dec328 {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 949.126261] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_a92e522e-69e8-4234-b3a9-4b1412dec328 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 949.126566] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94eca896-e1cc-41ab-8af6-41d8b314948c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.133300] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 949.133300] env[61629]: value = "task-1354395" [ 949.133300] env[61629]: _type = "Task" [ 949.133300] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.142737] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354395, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.185961] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5aa4c565-2a56-42e0-95c9-40d29e7229d5 tempest-ListServerFiltersTestJSON-321141740 tempest-ListServerFiltersTestJSON-321141740-project-member] Lock "9c340ca1-75e0-4d65-8aae-0d5e11ff3e66" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.600s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.296226] env[61629]: DEBUG nova.network.neutron [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Updating instance_info_cache with network_info: [{"id": "a193ab2f-5a9d-4411-94f9-cc5834b60795", "address": "fa:16:3e:f8:02:ef", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa193ab2f-5a", "ovs_interfaceid": "a193ab2f-5a9d-4411-94f9-cc5834b60795", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d666a690-afc1-4ce0-a878-192338b5dc0d", "address": "fa:16:3e:8e:a4:c2", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd666a690-af", "ovs_interfaceid": "d666a690-afc1-4ce0-a878-192338b5dc0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.422966] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8b59b377-ec12-4edd-b067-4c0026f776b0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.430842] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb67562e-7d43-4687-afad-26686745f865 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.467533] env[61629]: DEBUG nova.network.neutron [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Successfully updated port: ed466750-9924-4d59-a6a8-bc0bdb462dfe {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 949.468878] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ce475a2-0285-45bd-a24f-3f810efce82d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.478944] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d750934-2819-4438-a959-4707e1647773 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.484355] env[61629]: DEBUG nova.compute.manager [req-944b31d1-a310-47a8-a8bb-093b7545bb6d req-216455f9-ac34-447f-b362-13559ac4fb65 service nova] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Received event network-vif-plugged-ed466750-9924-4d59-a6a8-bc0bdb462dfe {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 949.484576] env[61629]: DEBUG oslo_concurrency.lockutils [req-944b31d1-a310-47a8-a8bb-093b7545bb6d req-216455f9-ac34-447f-b362-13559ac4fb65 service nova] Acquiring lock "3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.484789] env[61629]: DEBUG oslo_concurrency.lockutils [req-944b31d1-a310-47a8-a8bb-093b7545bb6d req-216455f9-ac34-447f-b362-13559ac4fb65 service nova] Lock "3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.484967] env[61629]: DEBUG oslo_concurrency.lockutils [req-944b31d1-a310-47a8-a8bb-093b7545bb6d req-216455f9-ac34-447f-b362-13559ac4fb65 service nova] Lock "3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.485261] env[61629]: DEBUG nova.compute.manager [req-944b31d1-a310-47a8-a8bb-093b7545bb6d req-216455f9-ac34-447f-b362-13559ac4fb65 service nova] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] No waiting events found dispatching network-vif-plugged-ed466750-9924-4d59-a6a8-bc0bdb462dfe {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 949.485977] env[61629]: WARNING nova.compute.manager [req-944b31d1-a310-47a8-a8bb-093b7545bb6d req-216455f9-ac34-447f-b362-13559ac4fb65 service nova] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Received unexpected event 
network-vif-plugged-ed466750-9924-4d59-a6a8-bc0bdb462dfe for instance with vm_state building and task_state spawning. [ 949.496803] env[61629]: DEBUG nova.compute.provider_tree [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 949.536943] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52b597bd-4bd1-9266-cf1a-0cc28393a927, 'name': SearchDatastore_Task, 'duration_secs': 0.040663} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.537902] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41587546-9fd1-4044-9e72-be48da3a6c31 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.543091] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Waiting for the task: (returnval){ [ 949.543091] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52c82342-f799-fe52-1023-8fafd9bad37d" [ 949.543091] env[61629]: _type = "Task" [ 949.543091] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.551114] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52c82342-f799-fe52-1023-8fafd9bad37d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.643711] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354395, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033286} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.644084] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 949.644262] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1e2172a0-89df-4cab-a61f-a1c2288e9094/1e2172a0-89df-4cab-a61f-a1c2288e9094.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.644529] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1e2172a0-89df-4cab-a61f-a1c2288e9094/1e2172a0-89df-4cab-a61f-a1c2288e9094.vmdk to [datastore1] fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4/fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 949.644868] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-89366bb8-63ff-4e41-ad56-0391839ee7e4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.651706] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 949.651706] env[61629]: value = "task-1354396" [ 949.651706] env[61629]: _type = "Task" [ 949.651706] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.659708] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354396, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.801278] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Releasing lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.802077] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.802700] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquired lock "7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.802700] env[61629]: DEBUG oslo_concurrency.lockutils [req-f47af008-8607-4834-8db7-01259581528f req-1549bf23-60e4-4c59-82a7-505f956c116c service nova] Acquired lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.802863] env[61629]: DEBUG nova.network.neutron [req-f47af008-8607-4834-8db7-01259581528f req-1549bf23-60e4-4c59-82a7-505f956c116c service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Refreshing network info cache for port d666a690-afc1-4ce0-a878-192338b5dc0d {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 949.805207] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff21e67-f931-4eff-8eff-8add019f62fa {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.833911] env[61629]: DEBUG nova.virt.hardware [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 949.834760] env[61629]: DEBUG nova.virt.hardware [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 949.834760] env[61629]: DEBUG nova.virt.hardware [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Image 
limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 949.836161] env[61629]: DEBUG nova.virt.hardware [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 949.836161] env[61629]: DEBUG nova.virt.hardware [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 949.836161] env[61629]: DEBUG nova.virt.hardware [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 949.836392] env[61629]: DEBUG nova.virt.hardware [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 949.836722] env[61629]: DEBUG nova.virt.hardware [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 949.837286] env[61629]: DEBUG nova.virt.hardware [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 949.837449] env[61629]: DEBUG nova.virt.hardware [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 949.837821] env[61629]: DEBUG nova.virt.hardware [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 949.847933] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Reconfiguring VM to attach interface {{(pid=61629) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 949.850442] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63607972-9cd3-4aac-8a05-aeaf5fb53077 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.871588] env[61629]: DEBUG oslo_vmware.api [None 
req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 949.871588] env[61629]: value = "task-1354397" [ 949.871588] env[61629]: _type = "Task" [ 949.871588] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.884891] env[61629]: DEBUG oslo_vmware.api [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354397, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.974095] env[61629]: DEBUG oslo_concurrency.lockutils [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "refresh_cache-3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.974095] env[61629]: DEBUG oslo_concurrency.lockutils [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquired lock "refresh_cache-3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.974294] env[61629]: DEBUG nova.network.neutron [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 950.000119] env[61629]: DEBUG nova.scheduler.client.report [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 950.058847] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52c82342-f799-fe52-1023-8fafd9bad37d, 'name': SearchDatastore_Task, 'duration_secs': 0.009131} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.059589] env[61629]: DEBUG oslo_concurrency.lockutils [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.059957] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e/ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 950.060288] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5088b2f6-0e4f-470a-9e54-328f90523832 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.069132] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Waiting for the task: (returnval){ [ 950.069132] env[61629]: value = "task-1354398" [ 950.069132] env[61629]: _type = "Task" [ 950.069132] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.079560] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354398, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.165080] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354396, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.381913] env[61629]: DEBUG oslo_vmware.api [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354397, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.505066] env[61629]: DEBUG oslo_concurrency.lockutils [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.863s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.507928] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.901s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.509488] env[61629]: INFO nova.compute.claims [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 950.533214] env[61629]: DEBUG nova.network.neutron [req-f47af008-8607-4834-8db7-01259581528f req-1549bf23-60e4-4c59-82a7-505f956c116c service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Updated VIF entry in instance network info cache for port d666a690-afc1-4ce0-a878-192338b5dc0d. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 950.533214] env[61629]: DEBUG nova.network.neutron [req-f47af008-8607-4834-8db7-01259581528f req-1549bf23-60e4-4c59-82a7-505f956c116c service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Updating instance_info_cache with network_info: [{"id": "a193ab2f-5a9d-4411-94f9-cc5834b60795", "address": "fa:16:3e:f8:02:ef", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa193ab2f-5a", "ovs_interfaceid": "a193ab2f-5a9d-4411-94f9-cc5834b60795", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d666a690-afc1-4ce0-a878-192338b5dc0d", "address": "fa:16:3e:8e:a4:c2", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": 
"fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd666a690-af", "ovs_interfaceid": "d666a690-afc1-4ce0-a878-192338b5dc0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.535640] env[61629]: INFO nova.scheduler.client.report [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Deleted allocations for instance 2b01eeae-64be-44b3-b4cf-c2a8490043e3 [ 950.545719] env[61629]: DEBUG nova.network.neutron [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 950.585882] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354398, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.669231] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354396, 'name': CopyVirtualDisk_Task} progress is 38%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.791995] env[61629]: DEBUG nova.network.neutron [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Updating instance_info_cache with network_info: [{"id": "ed466750-9924-4d59-a6a8-bc0bdb462dfe", "address": "fa:16:3e:4d:09:5e", "network": {"id": "249c4ba3-38e0-421a-91b6-cf97f90eb535", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1700423127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd318d29ec50427eb997c83837120c9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped466750-99", "ovs_interfaceid": "ed466750-9924-4d59-a6a8-bc0bdb462dfe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.886922] env[61629]: DEBUG oslo_vmware.api [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354397, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.037877] env[61629]: DEBUG oslo_concurrency.lockutils [req-f47af008-8607-4834-8db7-01259581528f req-1549bf23-60e4-4c59-82a7-505f956c116c service nova] Releasing lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.045695] env[61629]: DEBUG oslo_concurrency.lockutils [None req-29b4b1b8-b883-422c-b6da-dbbd8386ee54 tempest-AttachVolumeTestJSON-41894217 tempest-AttachVolumeTestJSON-41894217-project-member] Lock "2b01eeae-64be-44b3-b4cf-c2a8490043e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.422s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.085268] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354398, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.164823] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354396, 'name': CopyVirtualDisk_Task} progress is 57%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.295485] env[61629]: DEBUG oslo_concurrency.lockutils [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Releasing lock "refresh_cache-3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.296285] env[61629]: DEBUG nova.compute.manager [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Instance network_info: |[{"id": "ed466750-9924-4d59-a6a8-bc0bdb462dfe", "address": "fa:16:3e:4d:09:5e", "network": {"id": "249c4ba3-38e0-421a-91b6-cf97f90eb535", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1700423127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd318d29ec50427eb997c83837120c9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped466750-99", "ovs_interfaceid": "ed466750-9924-4d59-a6a8-bc0bdb462dfe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 951.296285] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:09:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ed466750-9924-4d59-a6a8-bc0bdb462dfe', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 951.303890] env[61629]: DEBUG oslo.service.loopingcall [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 951.304176] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 951.304420] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e330ea70-ece3-485e-b91d-9af5b3ac42b2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.328040] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 951.328040] env[61629]: value = "task-1354400" [ 951.328040] env[61629]: _type = "Task" [ 951.328040] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.339335] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354400, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.385143] env[61629]: DEBUG oslo_vmware.api [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354397, 'name': ReconfigVM_Task, 'duration_secs': 1.123967} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.386325] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Releasing lock "7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.386325] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Reconfigured VM to attach interface {{(pid=61629) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 951.580397] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354398, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.667649] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354396, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.833891] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2943bd0c-a6b0-4505-a55e-f1098a70eed9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.847421] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354400, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.848551] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448d8db8-b9d6-47c5-8f47-a06017ccd848 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.887367] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ecde1a-157b-45f3-948b-c87a65744931 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.893439] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b52fddcb-4d9d-487f-8ad7-f9c2f87c03c2 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "interface-7c3e9d0f-88a8-41fe-bf61-e3db34d36928-d666a690-afc1-4ce0-a878-192338b5dc0d" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.198s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.898943] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d672f2f4-c550-46eb-a334-90a48f160ac3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.918244] env[61629]: DEBUG nova.compute.provider_tree [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 952.082588] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354398, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.167069] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354396, 'name': CopyVirtualDisk_Task} progress is 94%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.249235] env[61629]: DEBUG nova.compute.manager [req-abcb41e3-3452-4cf5-ac76-88867689a229 req-928bc2f9-a0fe-439c-97bb-e0f5b7ef93f9 service nova] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Received event network-changed-ed466750-9924-4d59-a6a8-bc0bdb462dfe {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 952.249235] env[61629]: DEBUG nova.compute.manager [req-abcb41e3-3452-4cf5-ac76-88867689a229 req-928bc2f9-a0fe-439c-97bb-e0f5b7ef93f9 service nova] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Refreshing instance network info cache due to event network-changed-ed466750-9924-4d59-a6a8-bc0bdb462dfe. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 952.249878] env[61629]: DEBUG oslo_concurrency.lockutils [req-abcb41e3-3452-4cf5-ac76-88867689a229 req-928bc2f9-a0fe-439c-97bb-e0f5b7ef93f9 service nova] Acquiring lock "refresh_cache-3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 952.250306] env[61629]: DEBUG oslo_concurrency.lockutils [req-abcb41e3-3452-4cf5-ac76-88867689a229 req-928bc2f9-a0fe-439c-97bb-e0f5b7ef93f9 service nova] Acquired lock "refresh_cache-3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.252244] env[61629]: DEBUG nova.network.neutron [req-abcb41e3-3452-4cf5-ac76-88867689a229 req-928bc2f9-a0fe-439c-97bb-e0f5b7ef93f9 service nova] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Refreshing network info cache for port ed466750-9924-4d59-a6a8-bc0bdb462dfe {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 952.341142] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354400, 'name': CreateVM_Task} progress is 99%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.526183] env[61629]: ERROR nova.scheduler.client.report [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [req-0a8a68f4-827e-466b-9bc6-360ef8514146] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID d075eff1-6f77-44a8-824e-16f3e03b4063. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0a8a68f4-827e-466b-9bc6-360ef8514146"}]} [ 952.543704] env[61629]: DEBUG nova.scheduler.client.report [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Refreshing inventories for resource provider d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 952.558804] env[61629]: DEBUG nova.scheduler.client.report [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Updating ProviderTree inventory for provider d075eff1-6f77-44a8-824e-16f3e03b4063 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 952.559091] env[61629]: DEBUG nova.compute.provider_tree [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 952.571656] env[61629]: DEBUG nova.scheduler.client.report [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Refreshing aggregate associations for resource provider d075eff1-6f77-44a8-824e-16f3e03b4063, aggregates: None {{(pid=61629) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 952.584508] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354398, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.593077] env[61629]: DEBUG nova.scheduler.client.report [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Refreshing trait associations for resource provider d075eff1-6f77-44a8-824e-16f3e03b4063, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61629) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 952.667824] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354396, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.749413} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.668195] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1e2172a0-89df-4cab-a61f-a1c2288e9094/1e2172a0-89df-4cab-a61f-a1c2288e9094.vmdk to [datastore1] fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4/fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 952.671019] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-211997a4-22fa-4c67-8770-cc1227b9e914 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.702970] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4/fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4.vmdk or device None with type streamOptimized {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 952.705621] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70ca7fcc-0a72-4ac3-b946-de67ecfe2004 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.727875] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 952.727875] env[61629]: value = "task-1354401" [ 952.727875] env[61629]: _type = "Task" [ 952.727875] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.738297] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354401, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.843759] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354400, 'name': CreateVM_Task, 'duration_secs': 1.104803} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.843759] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 952.844296] env[61629]: DEBUG oslo_concurrency.lockutils [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 952.845252] env[61629]: DEBUG oslo_concurrency.lockutils [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.848030] env[61629]: DEBUG oslo_concurrency.lockutils [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 952.848030] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7ffe528-c6f4-431e-9810-493f7c712026 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.854178] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 952.854178] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52396d17-984d-dc91-7a58-69333de049eb" [ 952.854178] env[61629]: _type = "Task" [ 952.854178] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.862488] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd65234e-edc6-4a93-8cc0-c2c6b284bfea tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Volume attach. 
Driver type: vmdk {{(pid=61629) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 952.862730] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd65234e-edc6-4a93-8cc0-c2c6b284bfea tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-288554', 'volume_id': 'd653648b-258a-40e8-bd4a-ee2981909fd0', 'name': 'volume-d653648b-258a-40e8-bd4a-ee2981909fd0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '274e3437-eacd-4299-9c27-97bbb0ebf1c1', 'attached_at': '', 'detached_at': '', 'volume_id': 'd653648b-258a-40e8-bd4a-ee2981909fd0', 'serial': 'd653648b-258a-40e8-bd4a-ee2981909fd0'} {{(pid=61629) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 952.865138] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d865e142-67b9-48b7-af10-32729e841d29 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.870146] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52396d17-984d-dc91-7a58-69333de049eb, 'name': SearchDatastore_Task, 'duration_secs': 0.00919} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.870934] env[61629]: DEBUG oslo_concurrency.lockutils [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.871517] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 952.871811] env[61629]: DEBUG oslo_concurrency.lockutils [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 952.871995] env[61629]: DEBUG oslo_concurrency.lockutils [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.872462] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 
tempest-ServersNegativeTestJSON-94043772-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 952.887234] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b18861b1-f592-4146-8559-ca02c67d6572 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.896166] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9974527-3083-4b9d-82d3-a3ba53fdc688 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.922859] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd65234e-edc6-4a93-8cc0-c2c6b284bfea tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] volume-d653648b-258a-40e8-bd4a-ee2981909fd0/volume-d653648b-258a-40e8-bd4a-ee2981909fd0.vmdk or device None with type thin {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 952.925205] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cacc9aa3-9f62-42c1-a5ee-055b4706aab5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.939818] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 952.939818] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 952.940274] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae97d7e6-83b3-44c7-8426-8ee27d0eb9a5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.945733] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eddefa5e-4d80-4de3-a155-1fb03ae87e5b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.956020] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f7c76cb-a801-4200-8aad-9ded66d056b0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.957455] env[61629]: DEBUG oslo_vmware.api [None req-cd65234e-edc6-4a93-8cc0-c2c6b284bfea tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Waiting for the task: (returnval){ [ 952.957455] env[61629]: value = "task-1354402" [ 952.957455] env[61629]: _type = "Task" [ 952.957455] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.958514] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 952.958514] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52de6e1e-e444-0ad0-71e0-b5e868340b2d" [ 952.958514] env[61629]: _type = "Task" [ 952.958514] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.992355] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f399df7c-ef48-4cd8-9942-77719a08d8a8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.998178] env[61629]: DEBUG oslo_vmware.api [None req-cd65234e-edc6-4a93-8cc0-c2c6b284bfea tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354402, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.002843] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52de6e1e-e444-0ad0-71e0-b5e868340b2d, 'name': SearchDatastore_Task, 'duration_secs': 0.010371} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.005578] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4513cf3-bd88-41a0-8df3-1e063443d5b2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.009631] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e952a88d-7f9c-48c2-84c5-d6cf57912c89 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.020243] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 953.020243] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]520c3632-7ed1-8a3a-0a6a-b73826d404fe" [ 953.020243] env[61629]: _type = "Task" [ 953.020243] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.028647] env[61629]: DEBUG nova.compute.provider_tree [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 953.039853] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]520c3632-7ed1-8a3a-0a6a-b73826d404fe, 'name': SearchDatastore_Task, 'duration_secs': 0.011711} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.040245] env[61629]: DEBUG oslo_concurrency.lockutils [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 953.040406] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52/3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 953.042153] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-03f7234c-009d-4c75-a2eb-00d25e9e0065 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.047929] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 953.047929] env[61629]: value = "task-1354403" [ 953.047929] env[61629]: _type = "Task" [ 953.047929] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.056013] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354403, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.083688] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354398, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.535248} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.083973] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e/ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 953.084226] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 953.084507] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e1e6191-78a0-4611-896b-d385d73a6af2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.091067] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Waiting for the task: (returnval){ [ 953.091067] env[61629]: value = "task-1354404" [ 953.091067] env[61629]: _type = "Task" [ 953.091067] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.099500] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354404, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.238391] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354401, 'name': ReconfigVM_Task, 'duration_secs': 0.294168} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.238578] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Reconfigured VM instance instance-00000040 to attach disk [datastore1] fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4/fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4.vmdk or device None with type streamOptimized {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 953.239353] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8be7bb1-48df-4ad8-a0e4-ddabf906ef2c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.250475] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 953.250475] env[61629]: value = "task-1354405" [ 953.250475] env[61629]: _type = "Task" [ 953.250475] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.260333] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354405, 'name': Rename_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.265168] env[61629]: DEBUG nova.network.neutron [req-abcb41e3-3452-4cf5-ac76-88867689a229 req-928bc2f9-a0fe-439c-97bb-e0f5b7ef93f9 service nova] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Updated VIF entry in instance network info cache for port ed466750-9924-4d59-a6a8-bc0bdb462dfe. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 953.265467] env[61629]: DEBUG nova.network.neutron [req-abcb41e3-3452-4cf5-ac76-88867689a229 req-928bc2f9-a0fe-439c-97bb-e0f5b7ef93f9 service nova] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Updating instance_info_cache with network_info: [{"id": "ed466750-9924-4d59-a6a8-bc0bdb462dfe", "address": "fa:16:3e:4d:09:5e", "network": {"id": "249c4ba3-38e0-421a-91b6-cf97f90eb535", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1700423127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd318d29ec50427eb997c83837120c9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped466750-99", "ovs_interfaceid": "ed466750-9924-4d59-a6a8-bc0bdb462dfe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.470470] env[61629]: DEBUG oslo_vmware.api [None req-cd65234e-edc6-4a93-8cc0-c2c6b284bfea tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354402, 'name': ReconfigVM_Task, 'duration_secs': 0.3747} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.470790] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd65234e-edc6-4a93-8cc0-c2c6b284bfea tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Reconfigured VM instance instance-00000051 to attach disk [datastore2] volume-d653648b-258a-40e8-bd4a-ee2981909fd0/volume-d653648b-258a-40e8-bd4a-ee2981909fd0.vmdk or device None with type thin {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 953.476333] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-509343f2-5a09-489f-8a9b-38bd1406e2a0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.492327] env[61629]: DEBUG oslo_vmware.api [None req-cd65234e-edc6-4a93-8cc0-c2c6b284bfea tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Waiting for the task: (returnval){ [ 953.492327] env[61629]: value = "task-1354406" [ 953.492327] env[61629]: _type = "Task" [ 953.492327] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.504163] env[61629]: DEBUG oslo_vmware.api [None req-cd65234e-edc6-4a93-8cc0-c2c6b284bfea tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354406, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.558535] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354403, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.569472] env[61629]: DEBUG nova.scheduler.client.report [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Updated inventory for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with generation 108 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 953.569758] env[61629]: DEBUG nova.compute.provider_tree [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Updating resource provider d075eff1-6f77-44a8-824e-16f3e03b4063 generation from 108 to 109 during operation: update_inventory {{(pid=61629) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 953.569995] env[61629]: DEBUG nova.compute.provider_tree [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 953.604326] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354404, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094122} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.604846] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 953.606023] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23056156-a508-43c4-ab72-c380b081902b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.633924] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e/ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 953.634328] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7ce5774-162c-4891-9eb8-944cbb51c94c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.658614] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Waiting for the task: (returnval){ [ 953.658614] env[61629]: value = "task-1354407" [ 953.658614] env[61629]: _type = "Task" [ 953.658614] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.670590] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354407, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.764125] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354405, 'name': Rename_Task, 'duration_secs': 0.149232} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.764125] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 953.764125] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-30988f79-165e-401d-ba35-7e94dd1f30cb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.768751] env[61629]: DEBUG oslo_concurrency.lockutils [req-abcb41e3-3452-4cf5-ac76-88867689a229 req-928bc2f9-a0fe-439c-97bb-e0f5b7ef93f9 service nova] Releasing lock "refresh_cache-3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 953.770302] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 953.770302] env[61629]: value = "task-1354408" [ 953.770302] env[61629]: _type = "Task" [ 953.770302] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.778690] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354408, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.002139] env[61629]: DEBUG oslo_vmware.api [None req-cd65234e-edc6-4a93-8cc0-c2c6b284bfea tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354406, 'name': ReconfigVM_Task, 'duration_secs': 0.168228} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.002446] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd65234e-edc6-4a93-8cc0-c2c6b284bfea tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-288554', 'volume_id': 'd653648b-258a-40e8-bd4a-ee2981909fd0', 'name': 'volume-d653648b-258a-40e8-bd4a-ee2981909fd0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '274e3437-eacd-4299-9c27-97bbb0ebf1c1', 'attached_at': '', 'detached_at': '', 'volume_id': 'd653648b-258a-40e8-bd4a-ee2981909fd0', 'serial': 'd653648b-258a-40e8-bd4a-ee2981909fd0'} {{(pid=61629) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 954.060605] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354403, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.654831} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.061119] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52/3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 954.061504] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 954.064010] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ce9c7dca-fbe8-4967-a416-4015d3991b74 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.071173] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 954.071173] env[61629]: value = "task-1354410" [ 954.071173] env[61629]: _type = "Task" [ 954.071173] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.077689] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.570s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.078604] env[61629]: DEBUG nova.compute.manager [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 954.081437] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354410, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.082031] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.318s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.083638] env[61629]: INFO nova.compute.claims [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 954.156161] env[61629]: DEBUG oslo_concurrency.lockutils [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "interface-7c3e9d0f-88a8-41fe-bf61-e3db34d36928-d666a690-afc1-4ce0-a878-192338b5dc0d" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.156161] env[61629]: DEBUG oslo_concurrency.lockutils [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "interface-7c3e9d0f-88a8-41fe-bf61-e3db34d36928-d666a690-afc1-4ce0-a878-192338b5dc0d" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.168647] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354407, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.280357] env[61629]: DEBUG oslo_vmware.api [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354408, 'name': PowerOnVM_Task, 'duration_secs': 0.469817} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.280655] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 954.404480] env[61629]: DEBUG nova.compute.manager [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 954.405412] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e1ba38a-bda3-46eb-8f4c-b36762e79164 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.579110] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354410, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072237} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.579309] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 954.580177] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e71b04-1e96-4a59-a573-8256e3f9c85b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.586017] env[61629]: DEBUG nova.compute.utils [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 954.586166] env[61629]: DEBUG nova.compute.manager [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 954.586339] env[61629]: DEBUG nova.network.neutron [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 954.614023] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52/3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 954.617086] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a2198ee-2f76-4c2b-a5fd-8acd4ff75865 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.637530] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 954.637530] env[61629]: value = "task-1354411" [ 954.637530] env[61629]: _type = "Task" [ 954.637530] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.646495] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354411, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.659454] env[61629]: DEBUG oslo_concurrency.lockutils [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.660094] env[61629]: DEBUG oslo_concurrency.lockutils [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquired lock "7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.661178] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03292176-faba-43cc-83f4-7fc1bf2b1448 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.665355] env[61629]: DEBUG nova.policy [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec39705b9dd24915a0b3723ea45a85d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38efdd2cc07f45a49fb06d590aafb96b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 954.694702] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf59646-24ba-40ad-a8cf-59a6e32818b5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.697369] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354407, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.724520] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Reconfiguring VM to detach interface {{(pid=61629) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 954.727591] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-83ca81c5-51a2-4928-a98d-5bb17d7c266d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.747487] env[61629]: DEBUG oslo_vmware.api [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 954.747487] env[61629]: value = "task-1354412" [ 954.747487] env[61629]: _type = "Task" [ 954.747487] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.755304] env[61629]: DEBUG oslo_vmware.api [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354412, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.919641] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa3c8b1d-c8c5-49c8-bd59-8ccea02c5454 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.924475] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d67e515-79d7-40b4-8142-ff25ec1e8ff6 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 34.119s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.928586] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8d1267-b635-44be-a633-f9080dd1ec65 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.959691] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dbfcaa2-c18f-4335-8c3b-820383843d33 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.967296] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bff8f4d-28c3-4736-b986-835ad9024c59 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.980735] env[61629]: DEBUG nova.compute.provider_tree [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 955.040736] env[61629]: DEBUG nova.objects.instance [None req-cd65234e-edc6-4a93-8cc0-c2c6b284bfea tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lazy-loading 'flavor' on Instance uuid 274e3437-eacd-4299-9c27-97bbb0ebf1c1 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 955.089797] env[61629]: DEBUG nova.compute.manager [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 955.148509] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354411, 'name': ReconfigVM_Task, 'duration_secs': 0.288614} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.148509] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52/3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 955.148976] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5507faf4-dbb6-47fa-91de-5cf66c233de3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.156408] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 955.156408] env[61629]: value = "task-1354413" [ 955.156408] env[61629]: _type = "Task" [ 955.156408] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.167243] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354413, 'name': Rename_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.175727] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354407, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.225848] env[61629]: DEBUG nova.network.neutron [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Successfully created port: ff773335-988b-491d-bfab-ce5568c05be8 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 955.257890] env[61629]: DEBUG oslo_vmware.api [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354412, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.484321] env[61629]: DEBUG nova.scheduler.client.report [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 955.545783] env[61629]: DEBUG oslo_concurrency.lockutils [None req-cd65234e-edc6-4a93-8cc0-c2c6b284bfea tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "274e3437-eacd-4299-9c27-97bbb0ebf1c1" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.342s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.671259] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354413, 'name': Rename_Task, 'duration_secs': 0.144451} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.675062] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 955.675857] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354407, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.676147] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3efccc5f-29fb-4ad9-abe9-f05e5d772481 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.682663] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 955.682663] env[61629]: value = "task-1354414" [ 955.682663] env[61629]: _type = "Task" [ 955.682663] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.689808] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354414, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.757909] env[61629]: DEBUG oslo_vmware.api [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354412, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.818741] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b47abb29-b2d8-47fb-b890-be515ced201b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquiring lock "274e3437-eacd-4299-9c27-97bbb0ebf1c1" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.818741] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b47abb29-b2d8-47fb-b890-be515ced201b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "274e3437-eacd-4299-9c27-97bbb0ebf1c1" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.988818] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.907s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.989373] env[61629]: DEBUG nova.compute.manager [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 955.991987] env[61629]: DEBUG oslo_concurrency.lockutils [None req-07bba802-d57d-48fe-be33-1f48273ef4b9 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 10.662s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.099059] env[61629]: DEBUG nova.compute.manager [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 956.123990] env[61629]: DEBUG nova.virt.hardware [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 956.124273] env[61629]: DEBUG nova.virt.hardware [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 956.124440] env[61629]: DEBUG nova.virt.hardware [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 956.124626] env[61629]: DEBUG nova.virt.hardware [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 956.124782] env[61629]: DEBUG nova.virt.hardware [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 956.124935] env[61629]: DEBUG nova.virt.hardware [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 956.125167] env[61629]: DEBUG nova.virt.hardware [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 956.125335] env[61629]: DEBUG nova.virt.hardware [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 956.125506] env[61629]: DEBUG nova.virt.hardware [None 
req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 956.125675] env[61629]: DEBUG nova.virt.hardware [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 956.125855] env[61629]: DEBUG nova.virt.hardware [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 956.126730] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c25befc9-f2b3-464a-a9c9-adea123ff628 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.135098] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b8a629-632d-491c-9143-6fca015f45c9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.171725] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354407, 'name': ReconfigVM_Task, 'duration_secs': 2.120085} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.172025] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Reconfigured VM instance instance-0000005a to attach disk [datastore1] ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e/ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 956.172653] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-83831298-31bf-4971-b787-0ca1c2badfc3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.178526] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Waiting for the task: (returnval){ [ 956.178526] env[61629]: value = "task-1354415" [ 956.178526] env[61629]: _type = "Task" [ 956.178526] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.189246] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354415, 'name': Rename_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.194380] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354414, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.260438] env[61629]: DEBUG oslo_vmware.api [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354412, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.321668] env[61629]: INFO nova.compute.manager [None req-b47abb29-b2d8-47fb-b890-be515ced201b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Detaching volume d653648b-258a-40e8-bd4a-ee2981909fd0 [ 956.353913] env[61629]: INFO nova.virt.block_device [None req-b47abb29-b2d8-47fb-b890-be515ced201b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Attempting to driver detach volume d653648b-258a-40e8-bd4a-ee2981909fd0 from mountpoint /dev/sdb [ 956.354329] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-b47abb29-b2d8-47fb-b890-be515ced201b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Volume detach. Driver type: vmdk {{(pid=61629) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 956.354540] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-b47abb29-b2d8-47fb-b890-be515ced201b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-288554', 'volume_id': 'd653648b-258a-40e8-bd4a-ee2981909fd0', 'name': 'volume-d653648b-258a-40e8-bd4a-ee2981909fd0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '274e3437-eacd-4299-9c27-97bbb0ebf1c1', 'attached_at': '', 'detached_at': '', 'volume_id': 'd653648b-258a-40e8-bd4a-ee2981909fd0', 'serial': 'd653648b-258a-40e8-bd4a-ee2981909fd0'} {{(pid=61629) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 956.355589] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a14249-441f-4716-9f0b-582c27e2e28b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.382911] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea530cf-b0b4-4d82-a703-0685e7913821 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.390429] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c90f770f-03a9-4aec-bf8f-ab191dec5918 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.411911] env[61629]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed78cf9-4dff-430d-a71c-143e66cc8a9e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.426956] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-b47abb29-b2d8-47fb-b890-be515ced201b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] The volume has not been displaced from its original location: [datastore2] volume-d653648b-258a-40e8-bd4a-ee2981909fd0/volume-d653648b-258a-40e8-bd4a-ee2981909fd0.vmdk. No consolidation needed. {{(pid=61629) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 956.432472] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-b47abb29-b2d8-47fb-b890-be515ced201b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Reconfiguring VM instance instance-00000051 to detach disk 2001 {{(pid=61629) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 956.432817] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5d787e1-06bb-4e18-816e-11963fcb40a2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.451911] env[61629]: DEBUG oslo_vmware.api [None req-b47abb29-b2d8-47fb-b890-be515ced201b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Waiting for the task: (returnval){ [ 956.451911] env[61629]: value = "task-1354416" [ 956.451911] env[61629]: _type = "Task" [ 956.451911] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.460594] env[61629]: DEBUG oslo_vmware.api [None req-b47abb29-b2d8-47fb-b890-be515ced201b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354416, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.495501] env[61629]: DEBUG nova.compute.utils [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 956.497310] env[61629]: DEBUG nova.compute.manager [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 956.497524] env[61629]: DEBUG nova.network.neutron [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 956.569069] env[61629]: DEBUG nova.policy [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb6d2f45e922463d97bbb530b6ea464c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '19ad861ed9724d2f9d39100c9044a94b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 956.693841] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354415, 'name': Rename_Task, 'duration_secs': 0.146903} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.693841] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 956.694138] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-deef55be-56e1-47d2-ad00-ccc22c2c3596 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.699267] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354414, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.703375] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Waiting for the task: (returnval){ [ 956.703375] env[61629]: value = "task-1354417" [ 956.703375] env[61629]: _type = "Task" [ 956.703375] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.714433] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354417, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.766157] env[61629]: DEBUG oslo_vmware.api [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354412, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.772996] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd98aedc-37a9-48da-b77d-7c0e8321a5fb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.781960] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85bebfb1-d90e-45c8-a3d2-155e590cfa95 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.819474] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c56a2f8b-ddff-44dd-8220-6b834ce4a498 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.829096] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30a7df3-c2f4-4ab0-8fab-60069be96fe0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.846030] env[61629]: DEBUG nova.compute.provider_tree [None req-07bba802-d57d-48fe-be33-1f48273ef4b9 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.962614] env[61629]: DEBUG oslo_vmware.api [None req-b47abb29-b2d8-47fb-b890-be515ced201b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354416, 'name': ReconfigVM_Task, 'duration_secs': 0.243757} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.963443] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-b47abb29-b2d8-47fb-b890-be515ced201b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Reconfigured VM instance instance-00000051 to detach disk 2001 {{(pid=61629) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 956.968599] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ff04ab2-3186-47d5-9b99-e3a27afdf33d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.986584] env[61629]: DEBUG oslo_vmware.api [None req-b47abb29-b2d8-47fb-b890-be515ced201b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Waiting for the task: (returnval){ [ 956.986584] env[61629]: value = "task-1354419" [ 956.986584] env[61629]: _type = "Task" [ 956.986584] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.997262] env[61629]: DEBUG oslo_vmware.api [None req-b47abb29-b2d8-47fb-b890-be515ced201b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354419, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.004081] env[61629]: DEBUG nova.compute.manager [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 957.070636] env[61629]: DEBUG nova.network.neutron [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Successfully created port: 5edebde4-4d2e-4f37-b5c1-b7edf504e2d9 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 957.076683] env[61629]: DEBUG nova.compute.manager [req-3027eaac-a52e-4492-9b04-d9898530cc97 req-cd4930d4-9be4-4425-a1d7-b3e479640094 service nova] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Received event network-vif-plugged-ff773335-988b-491d-bfab-ce5568c05be8 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 957.077081] env[61629]: DEBUG oslo_concurrency.lockutils [req-3027eaac-a52e-4492-9b04-d9898530cc97 req-cd4930d4-9be4-4425-a1d7-b3e479640094 service nova] Acquiring lock "a08e5762-5307-4dd8-a025-a1cdfd43025e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.077445] env[61629]: DEBUG oslo_concurrency.lockutils [req-3027eaac-a52e-4492-9b04-d9898530cc97 req-cd4930d4-9be4-4425-a1d7-b3e479640094 service nova] Lock "a08e5762-5307-4dd8-a025-a1cdfd43025e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.077764] env[61629]: DEBUG oslo_concurrency.lockutils [req-3027eaac-a52e-4492-9b04-d9898530cc97 req-cd4930d4-9be4-4425-a1d7-b3e479640094 service nova] Lock "a08e5762-5307-4dd8-a025-a1cdfd43025e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.078079] env[61629]: DEBUG nova.compute.manager [req-3027eaac-a52e-4492-9b04-d9898530cc97 req-cd4930d4-9be4-4425-a1d7-b3e479640094 service nova] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] No waiting events found dispatching network-vif-plugged-ff773335-988b-491d-bfab-ce5568c05be8 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 957.078398] env[61629]: WARNING nova.compute.manager [req-3027eaac-a52e-4492-9b04-d9898530cc97 req-cd4930d4-9be4-4425-a1d7-b3e479640094 service nova] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Received unexpected event network-vif-plugged-ff773335-988b-491d-bfab-ce5568c05be8 for instance with vm_state building and task_state spawning. 
[ 957.193981] env[61629]: DEBUG oslo_vmware.api [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354414, 'name': PowerOnVM_Task, 'duration_secs': 1.502213} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.194185] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 957.194331] env[61629]: INFO nova.compute.manager [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Took 8.42 seconds to spawn the instance on the hypervisor. [ 957.194542] env[61629]: DEBUG nova.compute.manager [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 957.195422] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-384e4564-a941-4a25-b61a-a5aca9bfaf7c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.213352] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354417, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.260978] env[61629]: DEBUG oslo_vmware.api [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354412, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.347599] env[61629]: DEBUG nova.scheduler.client.report [None req-07bba802-d57d-48fe-be33-1f48273ef4b9 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 957.498148] env[61629]: DEBUG oslo_vmware.api [None req-b47abb29-b2d8-47fb-b890-be515ced201b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354419, 'name': ReconfigVM_Task, 'duration_secs': 0.202419} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.498292] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-b47abb29-b2d8-47fb-b890-be515ced201b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-288554', 'volume_id': 'd653648b-258a-40e8-bd4a-ee2981909fd0', 'name': 'volume-d653648b-258a-40e8-bd4a-ee2981909fd0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '274e3437-eacd-4299-9c27-97bbb0ebf1c1', 'attached_at': '', 'detached_at': '', 'volume_id': 'd653648b-258a-40e8-bd4a-ee2981909fd0', 'serial': 'd653648b-258a-40e8-bd4a-ee2981909fd0'} {{(pid=61629) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 957.651820] env[61629]: DEBUG nova.network.neutron [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Successfully updated port: ff773335-988b-491d-bfab-ce5568c05be8 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 957.676133] env[61629]: DEBUG nova.compute.manager [req-becc88ca-de47-4657-a098-2dce8c604645 req-ea2516c5-b387-4323-a554-d0de6d4b401c service nova] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Received event network-changed-ff773335-988b-491d-bfab-ce5568c05be8 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 957.676133] env[61629]: DEBUG nova.compute.manager [req-becc88ca-de47-4657-a098-2dce8c604645 req-ea2516c5-b387-4323-a554-d0de6d4b401c service nova] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Refreshing instance network info cache due to event network-changed-ff773335-988b-491d-bfab-ce5568c05be8. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 957.676447] env[61629]: DEBUG oslo_concurrency.lockutils [req-becc88ca-de47-4657-a098-2dce8c604645 req-ea2516c5-b387-4323-a554-d0de6d4b401c service nova] Acquiring lock "refresh_cache-a08e5762-5307-4dd8-a025-a1cdfd43025e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.676521] env[61629]: DEBUG oslo_concurrency.lockutils [req-becc88ca-de47-4657-a098-2dce8c604645 req-ea2516c5-b387-4323-a554-d0de6d4b401c service nova] Acquired lock "refresh_cache-a08e5762-5307-4dd8-a025-a1cdfd43025e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.676634] env[61629]: DEBUG nova.network.neutron [req-becc88ca-de47-4657-a098-2dce8c604645 req-ea2516c5-b387-4323-a554-d0de6d4b401c service nova] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Refreshing network info cache for port ff773335-988b-491d-bfab-ce5568c05be8 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 957.719817] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354417, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.720772] env[61629]: INFO nova.compute.manager [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Took 25.71 seconds to build instance. [ 957.761172] env[61629]: DEBUG oslo_vmware.api [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354412, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.015045] env[61629]: DEBUG nova.compute.manager [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 958.042083] env[61629]: DEBUG nova.virt.hardware [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 958.042351] env[61629]: DEBUG nova.virt.hardware [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 958.042514] env[61629]: DEBUG nova.virt.hardware [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 958.042704] env[61629]: DEBUG nova.virt.hardware [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 958.042856] env[61629]: DEBUG nova.virt.hardware [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 958.043041] env[61629]: DEBUG nova.virt.hardware [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 
tempest-ServerTagsTestJSON-168685904-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 958.043239] env[61629]: DEBUG nova.virt.hardware [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 958.043401] env[61629]: DEBUG nova.virt.hardware [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 958.043572] env[61629]: DEBUG nova.virt.hardware [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 958.043740] env[61629]: DEBUG nova.virt.hardware [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 958.044299] env[61629]: DEBUG nova.virt.hardware [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 958.045239] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2fdd54-6204-4b39-a860-b4d1186ef68e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.049042] env[61629]: DEBUG nova.objects.instance [None req-b47abb29-b2d8-47fb-b890-be515ced201b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lazy-loading 'flavor' on Instance uuid 274e3437-eacd-4299-9c27-97bbb0ebf1c1 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 958.055902] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-224772fa-db3c-4412-8fa5-f7ed35c94513 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.155658] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "refresh_cache-a08e5762-5307-4dd8-a025-a1cdfd43025e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.214816] env[61629]: DEBUG oslo_vmware.api [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354417, 'name': PowerOnVM_Task, 'duration_secs': 1.045719} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.215128] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 958.215341] env[61629]: INFO nova.compute.manager [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Took 15.64 seconds to spawn the instance on the hypervisor. [ 958.215654] env[61629]: DEBUG nova.compute.manager [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 958.216326] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072bf712-7260-494a-a4d6-c660e954fa3e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.219728] env[61629]: DEBUG nova.network.neutron [req-becc88ca-de47-4657-a098-2dce8c604645 req-ea2516c5-b387-4323-a554-d0de6d4b401c service nova] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 958.223094] env[61629]: DEBUG oslo_concurrency.lockutils [None req-adf5c814-52f6-4181-8743-c4e50125df38 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.218s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.260661] env[61629]: DEBUG oslo_vmware.api [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354412, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.305655] env[61629]: DEBUG oslo_concurrency.lockutils [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.305932] env[61629]: DEBUG oslo_concurrency.lockutils [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.306175] env[61629]: DEBUG oslo_concurrency.lockutils [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.306367] env[61629]: DEBUG oslo_concurrency.lockutils [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.306541] env[61629]: DEBUG oslo_concurrency.lockutils [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.308737] env[61629]: INFO nova.compute.manager [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Terminating instance [ 958.310495] env[61629]: DEBUG nova.compute.manager [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 958.310692] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 958.311491] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2d245e-b02d-45d7-b80a-0bb3d47efab8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.322281] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 958.322519] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fbff2cb5-f6a5-4434-988f-28fd98feaafa {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.327589] env[61629]: DEBUG oslo_vmware.api [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 958.327589] env[61629]: value = "task-1354420" [ 958.327589] env[61629]: _type = "Task" [ 958.327589] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.334816] env[61629]: DEBUG oslo_vmware.api [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354420, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.366035] env[61629]: DEBUG oslo_concurrency.lockutils [None req-07bba802-d57d-48fe-be33-1f48273ef4b9 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.373s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.369674] env[61629]: DEBUG oslo_concurrency.lockutils [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.777s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.369674] env[61629]: DEBUG nova.objects.instance [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lazy-loading 'resources' on Instance uuid 2ce60374-7baf-4d27-afbd-dcfaf6600a78 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 958.380324] env[61629]: DEBUG nova.network.neutron [req-becc88ca-de47-4657-a098-2dce8c604645 req-ea2516c5-b387-4323-a554-d0de6d4b401c service nova] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.584210] env[61629]: DEBUG nova.compute.manager [req-5b0e5412-ee2a-4584-aa81-55ce3121db24 req-6e98b729-dc50-4a18-86c8-6d0345fbe48a service nova] [instance: 87172592-f557-467f-ace2-805fd822681d] Received event network-vif-plugged-5edebde4-4d2e-4f37-b5c1-b7edf504e2d9 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 958.584426] env[61629]: DEBUG oslo_concurrency.lockutils [req-5b0e5412-ee2a-4584-aa81-55ce3121db24 req-6e98b729-dc50-4a18-86c8-6d0345fbe48a service nova] Acquiring lock "87172592-f557-467f-ace2-805fd822681d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.584637] env[61629]: DEBUG oslo_concurrency.lockutils [req-5b0e5412-ee2a-4584-aa81-55ce3121db24 req-6e98b729-dc50-4a18-86c8-6d0345fbe48a service nova] Lock "87172592-f557-467f-ace2-805fd822681d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.584811] env[61629]: DEBUG oslo_concurrency.lockutils [req-5b0e5412-ee2a-4584-aa81-55ce3121db24 req-6e98b729-dc50-4a18-86c8-6d0345fbe48a service nova] Lock "87172592-f557-467f-ace2-805fd822681d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.584978] env[61629]: DEBUG nova.compute.manager [req-5b0e5412-ee2a-4584-aa81-55ce3121db24 req-6e98b729-dc50-4a18-86c8-6d0345fbe48a service nova] [instance: 87172592-f557-467f-ace2-805fd822681d] No waiting events found dispatching network-vif-plugged-5edebde4-4d2e-4f37-b5c1-b7edf504e2d9 {{(pid=61629) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 958.585513] env[61629]: WARNING nova.compute.manager [req-5b0e5412-ee2a-4584-aa81-55ce3121db24 req-6e98b729-dc50-4a18-86c8-6d0345fbe48a service nova] [instance: 87172592-f557-467f-ace2-805fd822681d] Received unexpected event network-vif-plugged-5edebde4-4d2e-4f37-b5c1-b7edf504e2d9 for instance with vm_state building and task_state spawning. [ 958.675151] env[61629]: DEBUG nova.network.neutron [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Successfully updated port: 5edebde4-4d2e-4f37-b5c1-b7edf504e2d9 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 958.733863] env[61629]: INFO nova.compute.manager [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Took 32.36 seconds to build instance. [ 958.761558] env[61629]: DEBUG oslo_vmware.api [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354412, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.837026] env[61629]: DEBUG oslo_vmware.api [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354420, 'name': PowerOffVM_Task, 'duration_secs': 0.155789} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.837181] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 958.837310] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 958.837561] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ee0a790-066a-4f58-bc2b-77cc9f963d04 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.883573] env[61629]: DEBUG oslo_concurrency.lockutils [req-becc88ca-de47-4657-a098-2dce8c604645 req-ea2516c5-b387-4323-a554-d0de6d4b401c service nova] Releasing lock "refresh_cache-a08e5762-5307-4dd8-a025-a1cdfd43025e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.883941] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired lock "refresh_cache-a08e5762-5307-4dd8-a025-a1cdfd43025e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.884172] env[61629]: DEBUG nova.network.neutron [None 
req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 958.900715] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 958.900985] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 958.901196] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Deleting the datastore file [datastore2] 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 958.901688] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5400862-13e6-4c60-a7a5-807fb7459792 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.907751] env[61629]: DEBUG oslo_vmware.api [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 958.907751] env[61629]: value = "task-1354422" [ 958.907751] env[61629]: _type = "Task" [ 958.907751] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.918306] env[61629]: DEBUG oslo_vmware.api [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354422, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.938584] env[61629]: INFO nova.scheduler.client.report [None req-07bba802-d57d-48fe-be33-1f48273ef4b9 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Deleted allocation for migration 32006d8f-5a62-4120-b8f6-68dd596d1066 [ 959.057082] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b47abb29-b2d8-47fb-b890-be515ced201b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "274e3437-eacd-4299-9c27-97bbb0ebf1c1" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.238s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.070059] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10551351-5ae9-4592-8d0f-5d237c7af449 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.079335] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5038ef6b-f3c2-47c8-b633-ecf176366651 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.123249] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2462ef4c-073f-4907-9cff-d47d826fd432 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.132353] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcbfcee8-a975-4623-b73f-f4e76713cd37 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.147034] env[61629]: DEBUG nova.compute.provider_tree [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.178317] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Acquiring lock "refresh_cache-87172592-f557-467f-ace2-805fd822681d" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.178453] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Acquired lock "refresh_cache-87172592-f557-467f-ace2-805fd822681d" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.178609] env[61629]: DEBUG nova.network.neutron [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 959.183192] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 
tempest-ImagesNegativeTestJSON-320804497-project-member] Acquiring lock "ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.235859] env[61629]: DEBUG oslo_concurrency.lockutils [None req-798d616f-f058-428b-9bab-5496ccea9bfd tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Lock "ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.885s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.236174] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Lock "ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.053s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.236398] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Acquiring lock "ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.236605] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Lock "ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.236780] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Lock "ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.240074] env[61629]: INFO nova.compute.manager [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Terminating instance [ 959.240738] env[61629]: DEBUG nova.compute.manager [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 959.240928] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 959.241788] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e07d4dc6-8ded-419e-867c-c0e122211793 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.258948] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 959.261879] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d7a6639-58e5-4991-b815-d3633be27423 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.263240] env[61629]: DEBUG oslo_vmware.api [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354412, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.268169] env[61629]: DEBUG oslo_vmware.api [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Waiting for the task: (returnval){ [ 959.268169] env[61629]: value = "task-1354423" [ 959.268169] env[61629]: _type = "Task" [ 959.268169] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.275296] env[61629]: DEBUG oslo_vmware.api [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354423, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.420405] env[61629]: DEBUG oslo_vmware.api [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354422, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202857} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.420690] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 959.420916] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 959.421132] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 959.421319] env[61629]: INFO nova.compute.manager [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Took 1.11 seconds to destroy the instance on the hypervisor. [ 959.421593] env[61629]: DEBUG oslo.service.loopingcall [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 959.421795] env[61629]: DEBUG nova.compute.manager [-] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 959.421891] env[61629]: DEBUG nova.network.neutron [-] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 959.435437] env[61629]: DEBUG nova.network.neutron [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 959.446031] env[61629]: DEBUG oslo_concurrency.lockutils [None req-07bba802-d57d-48fe-be33-1f48273ef4b9 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "7cf87381-235e-449b-8269-61c2d4033028" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 17.528s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.651269] env[61629]: DEBUG nova.scheduler.client.report [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 959.672895] env[61629]: DEBUG nova.network.neutron [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Updating instance_info_cache with network_info: [{"id": "ff773335-988b-491d-bfab-ce5568c05be8", "address": "fa:16:3e:c5:7d:f6", "network": {"id": "03610486-2741-491e-a62d-a51579315e5a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1394073503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38efdd2cc07f45a49fb06d590aafb96b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff773335-98", "ovs_interfaceid": "ff773335-988b-491d-bfab-ce5568c05be8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.729676] env[61629]: DEBUG nova.network.neutron [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 959.764892] env[61629]: DEBUG oslo_vmware.api [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354412, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.776776] env[61629]: DEBUG oslo_vmware.api [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354423, 'name': PowerOffVM_Task, 'duration_secs': 0.18357} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.777101] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 959.777254] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 959.777518] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-837f93a9-11e1-4eb2-95a0-e02a816a742e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.836929] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 959.837230] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 959.837533] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Deleting the datastore file [datastore1] ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 959.837875] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-787cd669-6893-4972-abab-c71a38371a4b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.845136] env[61629]: DEBUG oslo_vmware.api [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Waiting for the task: (returnval){ [ 959.845136] env[61629]: value = "task-1354425" [ 959.845136] env[61629]: _type = "Task" [ 959.845136] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.853679] env[61629]: DEBUG oslo_vmware.api [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354425, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.063718] env[61629]: DEBUG nova.network.neutron [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Updating instance_info_cache with network_info: [{"id": "5edebde4-4d2e-4f37-b5c1-b7edf504e2d9", "address": "fa:16:3e:58:29:15", "network": {"id": "963544d3-eb37-4928-a24e-3736ae8681c6", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-262086963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19ad861ed9724d2f9d39100c9044a94b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f204ad5-8a45-4372-80ba-010fe0f9a337", "external-id": "nsx-vlan-transportzone-593", "segmentation_id": 593, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5edebde4-4d", "ovs_interfaceid": "5edebde4-4d2e-4f37-b5c1-b7edf504e2d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.088244] env[61629]: DEBUG oslo_concurrency.lockutils [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquiring lock "274e3437-eacd-4299-9c27-97bbb0ebf1c1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.088559] env[61629]: DEBUG oslo_concurrency.lockutils [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "274e3437-eacd-4299-9c27-97bbb0ebf1c1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.088865] env[61629]: DEBUG oslo_concurrency.lockutils [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquiring lock "274e3437-eacd-4299-9c27-97bbb0ebf1c1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.089363] env[61629]: DEBUG oslo_concurrency.lockutils [None req-539a319c-2137-4540-ac7d-3217de3c995b 
tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "274e3437-eacd-4299-9c27-97bbb0ebf1c1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.089363] env[61629]: DEBUG oslo_concurrency.lockutils [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "274e3437-eacd-4299-9c27-97bbb0ebf1c1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.091357] env[61629]: INFO nova.compute.manager [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Terminating instance [ 960.093777] env[61629]: DEBUG nova.compute.manager [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 960.093777] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 960.094244] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54cfb4df-6990-4480-a304-e84655f23b0a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.099663] env[61629]: DEBUG nova.compute.manager [req-cc3f3806-112b-4ad9-92d9-17057dc519c2 req-ffa6a6dd-aa5a-4a50-9c61-ae81e9f6aa5f service nova] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Received event network-vif-deleted-ed466750-9924-4d59-a6a8-bc0bdb462dfe {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 960.102952] env[61629]: INFO nova.compute.manager [req-cc3f3806-112b-4ad9-92d9-17057dc519c2 req-ffa6a6dd-aa5a-4a50-9c61-ae81e9f6aa5f service nova] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Neutron deleted interface ed466750-9924-4d59-a6a8-bc0bdb462dfe; detaching it from the instance and deleting it from the info cache [ 960.102952] env[61629]: DEBUG nova.network.neutron [req-cc3f3806-112b-4ad9-92d9-17057dc519c2 req-ffa6a6dd-aa5a-4a50-9c61-ae81e9f6aa5f service nova] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.108846] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 960.108846] env[61629]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78f9038f-9a12-4104-922a-35ad59f36067 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.113436] env[61629]: DEBUG oslo_vmware.api [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Waiting for the task: (returnval){ [ 960.113436] env[61629]: value = "task-1354426" [ 960.113436] env[61629]: _type = "Task" [ 960.113436] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.121984] env[61629]: DEBUG oslo_vmware.api [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354426, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.156928] env[61629]: DEBUG oslo_concurrency.lockutils [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.788s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.175914] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Releasing lock "refresh_cache-a08e5762-5307-4dd8-a025-a1cdfd43025e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.176297] env[61629]: DEBUG nova.compute.manager [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Instance network_info: |[{"id": "ff773335-988b-491d-bfab-ce5568c05be8", "address": "fa:16:3e:c5:7d:f6", "network": {"id": "03610486-2741-491e-a62d-a51579315e5a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1394073503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38efdd2cc07f45a49fb06d590aafb96b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff773335-98", "ovs_interfaceid": "ff773335-988b-491d-bfab-ce5568c05be8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 960.176769] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 
tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:7d:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd88b750a-0e7d-4f16-8bd5-8e6d5743b720', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff773335-988b-491d-bfab-ce5568c05be8', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 960.185079] env[61629]: DEBUG oslo.service.loopingcall [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 960.186354] env[61629]: INFO nova.scheduler.client.report [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Deleted allocations for instance 2ce60374-7baf-4d27-afbd-dcfaf6600a78 [ 960.191426] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 960.191426] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0db772a-03bc-4fb0-95d8-5e1b1f5b37ea {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.217022] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 960.217022] env[61629]: value = "task-1354427" [ 960.217022] env[61629]: _type = "Task" [ 960.217022] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.224871] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354427, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.263755] env[61629]: DEBUG oslo_vmware.api [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354412, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.356388] env[61629]: DEBUG oslo_vmware.api [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Task: {'id': task-1354425, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.214086} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.356541] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 960.356749] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 960.356953] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 960.357173] env[61629]: INFO nova.compute.manager [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Took 1.12 seconds to destroy the instance on the hypervisor. [ 960.357397] env[61629]: DEBUG oslo.service.loopingcall [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 960.357604] env[61629]: DEBUG nova.compute.manager [-] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 960.357701] env[61629]: DEBUG nova.network.neutron [-] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 960.470175] env[61629]: DEBUG nova.network.neutron [-] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.570014] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Releasing lock "refresh_cache-87172592-f557-467f-ace2-805fd822681d" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.570014] env[61629]: DEBUG nova.compute.manager [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Instance network_info: |[{"id": "5edebde4-4d2e-4f37-b5c1-b7edf504e2d9", "address": "fa:16:3e:58:29:15", "network": {"id": "963544d3-eb37-4928-a24e-3736ae8681c6", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-262086963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19ad861ed9724d2f9d39100c9044a94b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f204ad5-8a45-4372-80ba-010fe0f9a337", "external-id": "nsx-vlan-transportzone-593", "segmentation_id": 593, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5edebde4-4d", "ovs_interfaceid": "5edebde4-4d2e-4f37-b5c1-b7edf504e2d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 960.570014] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:29:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2f204ad5-8a45-4372-80ba-010fe0f9a337', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5edebde4-4d2e-4f37-b5c1-b7edf504e2d9', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 960.579435] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Creating folder: 
Project (19ad861ed9724d2f9d39100c9044a94b). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 960.581835] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f737e67-ee7a-4b05-936f-292839d319b9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.594021] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Created folder: Project (19ad861ed9724d2f9d39100c9044a94b) in parent group-v288443. [ 960.594021] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Creating folder: Instances. Parent ref: group-v288557. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 960.594021] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-682566f3-23ae-4134-a4db-eb70ad20da3c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.604497] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Created folder: Instances in parent group-v288557. [ 960.604497] env[61629]: DEBUG oslo.service.loopingcall [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 960.604497] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dd50a32b-8d87-41cc-81a8-69970ed9e598 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.606972] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87172592-f557-467f-ace2-805fd822681d] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 960.608361] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6af05c8f-d763-4930-a76c-aecb136cd014 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.627688] env[61629]: DEBUG nova.compute.manager [req-ca5bfe5d-63f8-4251-8343-2836740c5951 req-0b0089ad-7d44-4215-a10f-f308a6edca5c service nova] [instance: 87172592-f557-467f-ace2-805fd822681d] Received event network-changed-5edebde4-4d2e-4f37-b5c1-b7edf504e2d9 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 960.628038] env[61629]: DEBUG nova.compute.manager [req-ca5bfe5d-63f8-4251-8343-2836740c5951 req-0b0089ad-7d44-4215-a10f-f308a6edca5c service nova] [instance: 87172592-f557-467f-ace2-805fd822681d] Refreshing instance network info cache due to event network-changed-5edebde4-4d2e-4f37-b5c1-b7edf504e2d9. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 960.630020] env[61629]: DEBUG oslo_concurrency.lockutils [req-ca5bfe5d-63f8-4251-8343-2836740c5951 req-0b0089ad-7d44-4215-a10f-f308a6edca5c service nova] Acquiring lock "refresh_cache-87172592-f557-467f-ace2-805fd822681d" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.630020] env[61629]: DEBUG oslo_concurrency.lockutils [req-ca5bfe5d-63f8-4251-8343-2836740c5951 req-0b0089ad-7d44-4215-a10f-f308a6edca5c service nova] Acquired lock "refresh_cache-87172592-f557-467f-ace2-805fd822681d" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.630020] env[61629]: DEBUG nova.network.neutron [req-ca5bfe5d-63f8-4251-8343-2836740c5951 req-0b0089ad-7d44-4215-a10f-f308a6edca5c service nova] [instance: 87172592-f557-467f-ace2-805fd822681d] Refreshing network info cache for port 5edebde4-4d2e-4f37-b5c1-b7edf504e2d9 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 960.641164] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c151ddf4-b19c-4346-b6f3-6c586c122303 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.654898] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 960.654898] env[61629]: value = "task-1354430" [ 960.654898] env[61629]: _type = "Task" [ 960.654898] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.662875] env[61629]: DEBUG oslo_vmware.api [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354426, 'name': PowerOffVM_Task, 'duration_secs': 0.38097} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.663198] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 960.663410] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 960.664088] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e1db4d8-990c-4a2e-8c2c-550f3525de91 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.669362] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354430, 'name': CreateVM_Task} progress is 5%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.680063] env[61629]: DEBUG nova.compute.manager [req-cc3f3806-112b-4ad9-92d9-17057dc519c2 req-ffa6a6dd-aa5a-4a50-9c61-ae81e9f6aa5f service nova] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Detach interface failed, port_id=ed466750-9924-4d59-a6a8-bc0bdb462dfe, reason: Instance 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52 could not be found. {{(pid=61629) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 960.695257] env[61629]: DEBUG oslo_concurrency.lockutils [None req-830a2a20-bc03-4026-a5f9-99e3222d5c58 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "2ce60374-7baf-4d27-afbd-dcfaf6600a78" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.596s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.728346] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354427, 'name': CreateVM_Task} progress is 99%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.760164] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 960.760437] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 960.760653] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Deleting the datastore file [datastore1] 274e3437-eacd-4299-9c27-97bbb0ebf1c1 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 960.760935] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-abe9f1bc-2dfa-4e10-970b-e31c136e5ba6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.766172] env[61629]: DEBUG oslo_vmware.api [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354412, 'name': ReconfigVM_Task, 'duration_secs': 5.761713} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.766910] env[61629]: DEBUG oslo_concurrency.lockutils [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Releasing lock "7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.768815] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Reconfigured VM to detach interface {{(pid=61629) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 960.771169] env[61629]: DEBUG oslo_vmware.api [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Waiting for the task: (returnval){ [ 960.771169] env[61629]: value = "task-1354432" [ 960.771169] env[61629]: _type = "Task" [ 960.771169] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.781507] env[61629]: DEBUG oslo_vmware.api [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354432, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.972756] env[61629]: INFO nova.compute.manager [-] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Took 1.55 seconds to deallocate network for instance. 
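Editor's note: the recurring "Acquiring lock ... / Lock ... acquired ... waited N.NNNs / Lock ... released ... held N.NNNs" entries in this log come from oslo.concurrency's lockutils wrappers (the "inner .../lockutils.py:402/407/421" frames), which Nova uses to serialize per-instance operations such as do_terminate_instance. The following is a minimal, self-contained sketch of that pattern only; the instance UUID, the decorated function name, and the sleep are illustrative stand-ins, not Nova's actual code.

    # Sketch: reproduce the acquire/wait/release DEBUG lines emitted by
    # oslo.concurrency when a per-instance lock serializes an operation.
    import logging
    import time

    from oslo_concurrency import lockutils

    # Enable DEBUG logging so lockutils' acquire/release messages are visible.
    logging.basicConfig(level=logging.DEBUG)

    # Hypothetical instance UUID, used only as the lock name in this example.
    INSTANCE_UUID = "00000000-0000-0000-0000-000000000000"

    @lockutils.synchronized(INSTANCE_UUID)
    def do_terminate_instance():
        # Work done while the per-instance lock is held; concurrent callers
        # block here, which is why the log records how long each one waited
        # and how long the lock was held.
        time.sleep(0.1)

    if __name__ == "__main__":
        do_terminate_instance()

Running this prints lockutils DEBUG lines of the same shape as those above ("Acquiring lock ...", "... acquired ... waited 0.000s", "... released ... held 0.1xxs"); the timings differ, of course, from the real compute-manager run recorded here.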
[ 961.039931] env[61629]: DEBUG oslo_concurrency.lockutils [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "7cf87381-235e-449b-8269-61c2d4033028" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.040284] env[61629]: DEBUG oslo_concurrency.lockutils [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "7cf87381-235e-449b-8269-61c2d4033028" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.040558] env[61629]: DEBUG oslo_concurrency.lockutils [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "7cf87381-235e-449b-8269-61c2d4033028-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.040764] env[61629]: DEBUG oslo_concurrency.lockutils [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "7cf87381-235e-449b-8269-61c2d4033028-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.041095] env[61629]: DEBUG oslo_concurrency.lockutils [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "7cf87381-235e-449b-8269-61c2d4033028-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.043423] env[61629]: INFO nova.compute.manager [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Terminating instance [ 961.046485] env[61629]: DEBUG nova.compute.manager [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 961.046724] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 961.047979] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a7c677e-b43c-4838-832e-7ae066b0c5bc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.056180] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 961.056437] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f180632d-4ef1-47c4-83b1-f04d743d5563 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.063022] env[61629]: DEBUG oslo_vmware.api [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 961.063022] env[61629]: value = "task-1354433" [ 961.063022] env[61629]: _type = "Task" [ 961.063022] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.072245] env[61629]: DEBUG oslo_vmware.api [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354433, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.165229] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354430, 'name': CreateVM_Task, 'duration_secs': 0.447601} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.165229] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87172592-f557-467f-ace2-805fd822681d] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 961.165854] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 961.166053] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.166544] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 961.166720] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-113a8beb-4365-4d48-ad33-430a30379bde {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.173392] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Waiting for the task: (returnval){ [ 961.173392] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]529cdbd4-7fda-1775-2df2-e2460737e6fc" [ 961.173392] env[61629]: _type = "Task" [ 961.173392] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.184209] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]529cdbd4-7fda-1775-2df2-e2460737e6fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.228404] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354427, 'name': CreateVM_Task, 'duration_secs': 0.532236} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.228577] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 961.229279] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 961.278815] env[61629]: DEBUG nova.network.neutron [-] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.288525] env[61629]: DEBUG oslo_vmware.api [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Task: {'id': task-1354432, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.212341} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.291830] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 961.291830] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 961.291830] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 961.291830] env[61629]: INFO nova.compute.manager [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Took 1.20 seconds to destroy the instance on the hypervisor. [ 961.291830] env[61629]: DEBUG oslo.service.loopingcall [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 961.292206] env[61629]: DEBUG nova.compute.manager [-] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 961.292206] env[61629]: DEBUG nova.network.neutron [-] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 961.480212] env[61629]: DEBUG oslo_concurrency.lockutils [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.483519] env[61629]: DEBUG oslo_concurrency.lockutils [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.483519] env[61629]: DEBUG nova.objects.instance [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lazy-loading 'resources' on Instance uuid 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 961.575592] env[61629]: DEBUG oslo_vmware.api [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354433, 'name': PowerOffVM_Task, 'duration_secs': 0.346645} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.575824] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 961.576019] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 961.576295] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-60096dc5-1d3e-4aa6-a2e0-52074919fd38 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.666450] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 961.666839] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 961.667441] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Deleting the datastore file [datastore1] 7cf87381-235e-449b-8269-61c2d4033028 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 961.667784] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5dacc64a-b1da-419b-abb7-047a88e97531 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.678851] env[61629]: DEBUG oslo_vmware.api [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 961.678851] env[61629]: value = "task-1354435" [ 961.678851] env[61629]: _type = "Task" [ 961.678851] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.686689] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]529cdbd4-7fda-1775-2df2-e2460737e6fc, 'name': SearchDatastore_Task, 'duration_secs': 0.012895} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.687703] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.688120] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 961.688583] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 961.688788] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.689039] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 961.692738] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.693023] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 961.693318] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1e37a18-e8ed-4deb-98ce-acfa3ce8019f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.695890] env[61629]: DEBUG oslo_vmware.api [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354435, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.696119] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ff68f05-818b-488d-bf7d-c9855b3e6ea1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.704987] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 961.704987] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52e31f43-0730-0915-0cf3-caf2bef5274e" [ 961.704987] env[61629]: _type = "Task" [ 961.704987] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.711316] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 961.711614] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 961.715480] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5347b92-f548-4dfb-a384-b3132b283e23 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.718468] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52e31f43-0730-0915-0cf3-caf2bef5274e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.721825] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Waiting for the task: (returnval){ [ 961.721825] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5274f3d7-63b5-7c7a-a461-ae53e1a7c808" [ 961.721825] env[61629]: _type = "Task" [ 961.721825] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.731040] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5274f3d7-63b5-7c7a-a461-ae53e1a7c808, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.782845] env[61629]: INFO nova.compute.manager [-] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Took 1.42 seconds to deallocate network for instance. 
[ 961.806755] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "3994458a-195a-478c-b6d0-d8e36df989a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.807020] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "3994458a-195a-478c-b6d0-d8e36df989a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.896994] env[61629]: DEBUG nova.network.neutron [req-ca5bfe5d-63f8-4251-8343-2836740c5951 req-0b0089ad-7d44-4215-a10f-f308a6edca5c service nova] [instance: 87172592-f557-467f-ace2-805fd822681d] Updated VIF entry in instance network info cache for port 5edebde4-4d2e-4f37-b5c1-b7edf504e2d9. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 961.897404] env[61629]: DEBUG nova.network.neutron [req-ca5bfe5d-63f8-4251-8343-2836740c5951 req-0b0089ad-7d44-4215-a10f-f308a6edca5c service nova] [instance: 87172592-f557-467f-ace2-805fd822681d] Updating instance_info_cache with network_info: [{"id": "5edebde4-4d2e-4f37-b5c1-b7edf504e2d9", "address": "fa:16:3e:58:29:15", "network": {"id": "963544d3-eb37-4928-a24e-3736ae8681c6", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-262086963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19ad861ed9724d2f9d39100c9044a94b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f204ad5-8a45-4372-80ba-010fe0f9a337", "external-id": "nsx-vlan-transportzone-593", "segmentation_id": 593, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5edebde4-4d", "ovs_interfaceid": "5edebde4-4d2e-4f37-b5c1-b7edf504e2d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.155406] env[61629]: DEBUG nova.compute.manager [req-49b46984-b330-422b-be36-0cf5ec9c4ddf req-ebfa59a3-3db6-42b6-8eb5-7b5549a5f431 service nova] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Received event network-vif-deleted-5bb08edd-3639-401f-9e54-26abd98b246e {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 962.156399] env[61629]: INFO nova.compute.manager [req-49b46984-b330-422b-be36-0cf5ec9c4ddf req-ebfa59a3-3db6-42b6-8eb5-7b5549a5f431 service nova] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Neutron deleted interface 5bb08edd-3639-401f-9e54-26abd98b246e; detaching it from the instance and deleting it from the info cache [ 
962.156399] env[61629]: DEBUG nova.network.neutron [req-49b46984-b330-422b-be36-0cf5ec9c4ddf req-ebfa59a3-3db6-42b6-8eb5-7b5549a5f431 service nova] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.192578] env[61629]: DEBUG oslo_vmware.api [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354435, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.257953} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.193434] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 962.193727] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 962.193986] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 962.194229] env[61629]: INFO nova.compute.manager [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Took 1.15 seconds to destroy the instance on the hypervisor. [ 962.194493] env[61629]: DEBUG oslo.service.loopingcall [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 962.195046] env[61629]: DEBUG nova.compute.manager [-] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 962.195154] env[61629]: DEBUG nova.network.neutron [-] [instance: 7cf87381-235e-449b-8269-61c2d4033028] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 962.223705] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52e31f43-0730-0915-0cf3-caf2bef5274e, 'name': SearchDatastore_Task, 'duration_secs': 0.018185} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.224285] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 962.224677] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 962.224929] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.237490] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5274f3d7-63b5-7c7a-a461-ae53e1a7c808, 'name': SearchDatastore_Task, 'duration_secs': 0.009115} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.243081] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b80ef99-361f-41a3-b1f1-d45b3885d25c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.257994] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Waiting for the task: (returnval){ [ 962.257994] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52940f0c-8810-0db1-9cac-c5802a66a18c" [ 962.257994] env[61629]: _type = "Task" [ 962.257994] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.259221] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d4f99c-ff2a-449f-87ce-bc69098fa612 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.272592] env[61629]: DEBUG oslo_concurrency.lockutils [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.272719] env[61629]: DEBUG oslo_concurrency.lockutils [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquired lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.272890] env[61629]: DEBUG nova.network.neutron [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 962.274835] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52940f0c-8810-0db1-9cac-c5802a66a18c, 'name': SearchDatastore_Task, 'duration_secs': 0.012739} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.279771] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 962.280087] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 87172592-f557-467f-ace2-805fd822681d/87172592-f557-467f-ace2-805fd822681d.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 962.280658] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.280863] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 962.281407] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f1b0c9fe-9f75-4d64-887f-cfeba6cf6d21 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.285418] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caeccf5b-8636-453c-89e8-cb4ec5893756 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.290829] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0480cc48-9433-4b1f-a171-d67132fe5bdf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.297392] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.325760] env[61629]: DEBUG nova.compute.manager [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Starting instance... 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 962.332167] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e307a7b2-6844-400d-a0f9-db72d99452f8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.334955] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Waiting for the task: (returnval){ [ 962.334955] env[61629]: value = "task-1354436" [ 962.334955] env[61629]: _type = "Task" [ 962.334955] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.335211] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 962.335332] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 962.336435] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99a1852e-8aeb-4ee7-9d11-096e1d85f325 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.346850] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f502a908-918a-4b35-bf9f-740ada37448e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.354950] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 962.354950] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5240946e-bf13-88d9-f4bf-5924c5e74427" [ 962.354950] env[61629]: _type = "Task" [ 962.354950] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.355505] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Task: {'id': task-1354436, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.367733] env[61629]: DEBUG nova.compute.provider_tree [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 962.375412] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5240946e-bf13-88d9-f4bf-5924c5e74427, 'name': SearchDatastore_Task, 'duration_secs': 0.011165} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.376261] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20f2a2dd-5421-4a8d-81e8-4ce6532552c1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.381934] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 962.381934] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52e4bbe0-f855-c5f9-bcc9-c768d4e44e80" [ 962.381934] env[61629]: _type = "Task" [ 962.381934] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.390472] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52e4bbe0-f855-c5f9-bcc9-c768d4e44e80, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.399948] env[61629]: DEBUG oslo_concurrency.lockutils [req-ca5bfe5d-63f8-4251-8343-2836740c5951 req-0b0089ad-7d44-4215-a10f-f308a6edca5c service nova] Releasing lock "refresh_cache-87172592-f557-467f-ace2-805fd822681d" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 962.440368] env[61629]: DEBUG nova.network.neutron [-] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.573029] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "22f71f92-ca9a-4b97-a652-3f34a0dabde2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.573029] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "22f71f92-ca9a-4b97-a652-3f34a0dabde2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.642355] env[61629]: DEBUG nova.compute.manager [req-2016069d-85e8-4189-9069-cc587aff1459 req-54e6d744-c415-4654-8719-52759406af79 service nova] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Received event network-vif-deleted-734452b5-1029-4186-9c1f-a26f48e3b47f {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 962.669225] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-578a144d-a72d-4bce-baf3-87e478306e0f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.681400] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98effa50-33cf-47c8-86d6-decd538ae7aa {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.710642] env[61629]: DEBUG nova.compute.manager [req-49b46984-b330-422b-be36-0cf5ec9c4ddf req-ebfa59a3-3db6-42b6-8eb5-7b5549a5f431 service nova] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Detach interface failed, port_id=5bb08edd-3639-401f-9e54-26abd98b246e, reason: Instance 274e3437-eacd-4299-9c27-97bbb0ebf1c1 could not be found. 
{{(pid=61629) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 962.758258] env[61629]: DEBUG oslo_concurrency.lockutils [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "7c3e9d0f-88a8-41fe-bf61-e3db34d36928" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.758258] env[61629]: DEBUG oslo_concurrency.lockutils [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "7c3e9d0f-88a8-41fe-bf61-e3db34d36928" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.758258] env[61629]: DEBUG oslo_concurrency.lockutils [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "7c3e9d0f-88a8-41fe-bf61-e3db34d36928-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.758258] env[61629]: DEBUG oslo_concurrency.lockutils [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "7c3e9d0f-88a8-41fe-bf61-e3db34d36928-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.758258] env[61629]: DEBUG oslo_concurrency.lockutils [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "7c3e9d0f-88a8-41fe-bf61-e3db34d36928-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.761031] env[61629]: INFO nova.compute.manager [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Terminating instance [ 962.763666] env[61629]: DEBUG nova.compute.manager [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 962.763966] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 962.765931] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53fa0d01-a8a9-4dc5-9adf-7e4c03aca87c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.775600] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 962.779012] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0fc87fcc-cf93-4277-a52c-1b2d00425e1e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.785927] env[61629]: DEBUG oslo_vmware.api [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 962.785927] env[61629]: value = "task-1354437" [ 962.785927] env[61629]: _type = "Task" [ 962.785927] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.796027] env[61629]: DEBUG oslo_vmware.api [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354437, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.847288] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Task: {'id': task-1354436, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.856874] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.891967] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52e4bbe0-f855-c5f9-bcc9-c768d4e44e80, 'name': SearchDatastore_Task, 'duration_secs': 0.00965} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.892558] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 962.892558] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] a08e5762-5307-4dd8-a025-a1cdfd43025e/a08e5762-5307-4dd8-a025-a1cdfd43025e.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 962.893373] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-747e58f7-b992-4b81-97ed-9fe1652c2599 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.898863] env[61629]: ERROR nova.scheduler.client.report [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [req-1cd551bc-1418-48c9-84bc-3e031f3909a8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID d075eff1-6f77-44a8-824e-16f3e03b4063. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1cd551bc-1418-48c9-84bc-3e031f3909a8"}]} [ 962.902624] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 962.902624] env[61629]: value = "task-1354438" [ 962.902624] env[61629]: _type = "Task" [ 962.902624] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.912634] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354438, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.922235] env[61629]: DEBUG nova.scheduler.client.report [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Refreshing inventories for resource provider d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 962.940266] env[61629]: DEBUG nova.scheduler.client.report [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Updating ProviderTree inventory for provider d075eff1-6f77-44a8-824e-16f3e03b4063 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 962.940714] env[61629]: DEBUG nova.compute.provider_tree [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 962.943361] env[61629]: INFO nova.compute.manager [-] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Took 1.65 seconds to deallocate network for instance. 
[ 962.954912] env[61629]: DEBUG nova.scheduler.client.report [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Refreshing aggregate associations for resource provider d075eff1-6f77-44a8-824e-16f3e03b4063, aggregates: None {{(pid=61629) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 962.976797] env[61629]: DEBUG nova.scheduler.client.report [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Refreshing trait associations for resource provider d075eff1-6f77-44a8-824e-16f3e03b4063, traits: COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61629) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 963.014143] env[61629]: DEBUG nova.network.neutron [-] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.075239] env[61629]: DEBUG nova.compute.manager [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 963.124756] env[61629]: INFO nova.network.neutron [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Port d666a690-afc1-4ce0-a878-192338b5dc0d from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 963.125205] env[61629]: DEBUG nova.network.neutron [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Updating instance_info_cache with network_info: [{"id": "a193ab2f-5a9d-4411-94f9-cc5834b60795", "address": "fa:16:3e:f8:02:ef", "network": {"id": "74993df5-f495-415f-bb5a-87983f0b2da1", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-767185375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e7fced3a50d4821b42cf087d8111cb7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa193ab2f-5a", "ovs_interfaceid": "a193ab2f-5a9d-4411-94f9-cc5834b60795", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.231720] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed457c7-73d8-4638-aeea-b2cd564d445f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.240507] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2630bdfc-8c47-49f8-aeca-dfd6644a7583 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.273537] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34812d50-0423-43c5-8021-ebdbabc037a1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.282768] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37153bbc-d1c7-410c-874f-c98e9782e849 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.300585] env[61629]: DEBUG nova.compute.provider_tree [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 963.305467] env[61629]: DEBUG oslo_vmware.api [None 
req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354437, 'name': PowerOffVM_Task, 'duration_secs': 0.196084} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.306118] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 963.306310] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 963.306591] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-05b82009-5669-43c7-8b3a-1ba6ef726dd1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.348338] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Task: {'id': task-1354436, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.636445} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.348338] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 87172592-f557-467f-ace2-805fd822681d/87172592-f557-467f-ace2-805fd822681d.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 963.348632] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 963.348907] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-87fa1040-d3c0-4644-8bb0-aa056933756e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.355170] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Waiting for the task: (returnval){ [ 963.355170] env[61629]: value = "task-1354440" [ 963.355170] env[61629]: _type = "Task" [ 963.355170] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.364489] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Task: {'id': task-1354440, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.394974] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 963.395281] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 963.395585] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Deleting the datastore file [datastore1] 7c3e9d0f-88a8-41fe-bf61-e3db34d36928 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 963.395924] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a3cf667e-854b-4604-841d-34ba6d24a3c4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.402968] env[61629]: DEBUG oslo_vmware.api [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 963.402968] env[61629]: value = "task-1354441" [ 963.402968] env[61629]: _type = "Task" [ 963.402968] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.413305] env[61629]: DEBUG oslo_vmware.api [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354441, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.416451] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354438, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464135} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.416706] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] a08e5762-5307-4dd8-a025-a1cdfd43025e/a08e5762-5307-4dd8-a025-a1cdfd43025e.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 963.416930] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 963.417188] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c4ccb460-e6cd-4af2-b382-762333a270c9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.423049] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 963.423049] env[61629]: value = "task-1354442" [ 963.423049] env[61629]: _type = "Task" [ 963.423049] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.430958] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354442, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.449904] env[61629]: DEBUG oslo_concurrency.lockutils [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.517827] env[61629]: INFO nova.compute.manager [-] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Took 1.32 seconds to deallocate network for instance. 
[ 963.599065] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.628494] env[61629]: DEBUG oslo_concurrency.lockutils [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Releasing lock "refresh_cache-7c3e9d0f-88a8-41fe-bf61-e3db34d36928" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 963.836436] env[61629]: DEBUG nova.scheduler.client.report [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Updated inventory for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with generation 112 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 963.836711] env[61629]: DEBUG nova.compute.provider_tree [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Updating resource provider d075eff1-6f77-44a8-824e-16f3e03b4063 generation from 112 to 113 during operation: update_inventory {{(pid=61629) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 963.836897] env[61629]: DEBUG nova.compute.provider_tree [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Updating inventory in ProviderTree for provider d075eff1-6f77-44a8-824e-16f3e03b4063 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 963.866797] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Task: {'id': task-1354440, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066116} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.867086] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 963.867950] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c5712c4-d8ce-4548-a4f5-3fc38d40c756 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.889734] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] 87172592-f557-467f-ace2-805fd822681d/87172592-f557-467f-ace2-805fd822681d.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 963.890289] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0969b31a-33c1-4138-aae8-dcb02d7fa096 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.914128] env[61629]: DEBUG oslo_vmware.api [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354441, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.225843} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.915319] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 963.915521] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 963.915704] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 963.915890] env[61629]: INFO nova.compute.manager [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 963.916140] env[61629]: DEBUG oslo.service.loopingcall [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 963.916393] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Waiting for the task: (returnval){ [ 963.916393] env[61629]: value = "task-1354443" [ 963.916393] env[61629]: _type = "Task" [ 963.916393] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.916578] env[61629]: DEBUG nova.compute.manager [-] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 963.916672] env[61629]: DEBUG nova.network.neutron [-] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 963.925659] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Task: {'id': task-1354443, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.932495] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354442, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066508} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.932750] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 963.933491] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0617ca4-4f27-4b4f-9992-dca8f02ca19c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.955963] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] a08e5762-5307-4dd8-a025-a1cdfd43025e/a08e5762-5307-4dd8-a025-a1cdfd43025e.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 963.958461] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2839067-e31e-4b7a-95f3-ca52f60e4204 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.977882] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 963.977882] env[61629]: value = "task-1354444" [ 963.977882] env[61629]: _type = "Task" [ 963.977882] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.985734] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354444, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.025325] env[61629]: DEBUG oslo_concurrency.lockutils [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.134221] env[61629]: DEBUG oslo_concurrency.lockutils [None req-234e5641-21c4-4afb-803f-074687bcfc5a tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "interface-7c3e9d0f-88a8-41fe-bf61-e3db34d36928-d666a690-afc1-4ce0-a878-192338b5dc0d" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.977s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.185670] env[61629]: DEBUG nova.compute.manager [req-bfea0f7c-16b3-4159-bb9d-3cd7e57eeb36 req-38c767f8-a96f-40ab-92a6-577e0c0f4776 service nova] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Received event network-vif-deleted-e28dd480-831a-49f0-804e-ad88763d3c24 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 964.341513] env[61629]: DEBUG oslo_concurrency.lockutils [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.861s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.343802] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.047s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.344126] env[61629]: DEBUG nova.objects.instance [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Lazy-loading 'resources' on Instance uuid ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 964.369276] env[61629]: INFO nova.scheduler.client.report [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Deleted allocations for instance 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52 [ 964.428156] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Task: {'id': task-1354443, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.488281] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354444, 'name': ReconfigVM_Task, 'duration_secs': 0.31432} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.488580] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Reconfigured VM instance instance-0000005c to attach disk [datastore2] a08e5762-5307-4dd8-a025-a1cdfd43025e/a08e5762-5307-4dd8-a025-a1cdfd43025e.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 964.489311] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-396a647d-60ef-47ed-a8ec-597f12034106 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.496071] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 964.496071] env[61629]: value = "task-1354445" [ 964.496071] env[61629]: _type = "Task" [ 964.496071] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.505153] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354445, 'name': Rename_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.876508] env[61629]: DEBUG oslo_concurrency.lockutils [None req-556417b2-13a1-4f2f-be0d-9aeda805a9d1 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.570s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.931085] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Task: {'id': task-1354443, 'name': ReconfigVM_Task, 'duration_secs': 0.727201} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.931406] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Reconfigured VM instance instance-0000005d to attach disk [datastore2] 87172592-f557-467f-ace2-805fd822681d/87172592-f557-467f-ace2-805fd822681d.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 964.932071] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-43f49696-fe8f-4f7a-b198-6672546dd417 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.942020] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Waiting for the task: (returnval){ [ 964.942020] env[61629]: value = "task-1354446" [ 964.942020] env[61629]: _type = "Task" [ 964.942020] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.948286] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Task: {'id': task-1354446, 'name': Rename_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.007563] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354445, 'name': Rename_Task, 'duration_secs': 0.14722} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.007563] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 965.010480] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3792ce69-d259-4936-ac29-58731f0b60d6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.016713] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 965.016713] env[61629]: value = "task-1354447" [ 965.016713] env[61629]: _type = "Task" [ 965.016713] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.025061] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354447, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.072072] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b82421-795a-4144-a07e-477f51fd2ca9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.080157] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8949586b-81b0-4fb0-85be-7fc8362da3bc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.110206] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bbfebca-c16e-4bab-b29f-278cb941fa75 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.117986] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c890b25-91c9-4d3c-8d90-aeaf59c87c88 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.133064] env[61629]: DEBUG nova.compute.provider_tree [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 965.214399] env[61629]: DEBUG nova.network.neutron [-] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.449838] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Task: {'id': task-1354446, 'name': Rename_Task, 'duration_secs': 0.157333} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.450268] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 965.450507] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a3fdc710-2d7b-491c-9cd3-a9cb88d23350 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.456342] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Waiting for the task: (returnval){ [ 965.456342] env[61629]: value = "task-1354448" [ 965.456342] env[61629]: _type = "Task" [ 965.456342] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.463770] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Task: {'id': task-1354448, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.526356] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354447, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.636822] env[61629]: DEBUG nova.scheduler.client.report [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 965.718194] env[61629]: INFO nova.compute.manager [-] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Took 1.80 seconds to deallocate network for instance. [ 965.966836] env[61629]: DEBUG oslo_vmware.api [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Task: {'id': task-1354448, 'name': PowerOnVM_Task, 'duration_secs': 0.471435} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.966836] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 965.966836] env[61629]: INFO nova.compute.manager [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Took 7.95 seconds to spawn the instance on the hypervisor. [ 965.967117] env[61629]: DEBUG nova.compute.manager [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 965.967665] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e391821f-7613-4516-b430-97e591d1c778 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.026753] env[61629]: DEBUG oslo_vmware.api [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354447, 'name': PowerOnVM_Task, 'duration_secs': 0.611437} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.027226] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 966.027336] env[61629]: INFO nova.compute.manager [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Took 9.93 seconds to spawn the instance on the hypervisor. [ 966.027721] env[61629]: DEBUG nova.compute.manager [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 966.028230] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304f3acc-0997-4ebe-b2c1-8413cbb71158 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.142662] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.799s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.145061] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.288s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.146532] env[61629]: INFO nova.compute.claims [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 966.165249] env[61629]: INFO nova.scheduler.client.report [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Deleted allocations for instance ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e [ 966.211328] env[61629]: DEBUG nova.compute.manager [req-c9381c6c-dc91-406f-a14e-5913f88f724e req-dfcaac24-17ee-4051-8801-f47badea84fd service nova] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Received event network-vif-deleted-a193ab2f-5a9d-4411-94f9-cc5834b60795 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 966.225854] env[61629]: DEBUG oslo_concurrency.lockutils [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.486548] env[61629]: INFO nova.compute.manager [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Took 21.75 seconds to build instance. [ 966.550910] env[61629]: INFO nova.compute.manager [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Took 29.97 seconds to build instance. [ 966.672609] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e9cdc769-8945-47ea-89c6-a5f4c6c71835 tempest-ImagesNegativeTestJSON-320804497 tempest-ImagesNegativeTestJSON-320804497-project-member] Lock "ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.436s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.988745] env[61629]: DEBUG oslo_concurrency.lockutils [None req-0463bb85-f6ce-4369-9bdc-5fc213a14c19 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Lock "87172592-f557-467f-ace2-805fd822681d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.262s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.054673] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2112a06-8bb6-4131-88c8-3c8314eba578 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "a08e5762-5307-4dd8-a025-a1cdfd43025e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.481s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.246408] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "a08e5762-5307-4dd8-a025-a1cdfd43025e" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.246667] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "a08e5762-5307-4dd8-a025-a1cdfd43025e" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.246847] env[61629]: INFO nova.compute.manager [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Shelving [ 967.331871] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26e1edbd-39bd-418c-a5e0-ee85cf411c95 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.339522] env[61629]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed81f2a-c05a-4448-9a56-5556fa729017 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.368860] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a37bd1d-d68f-441c-8271-afb451ddddab {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.376482] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f554662a-5987-48aa-9971-e56b43b34eed {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.390130] env[61629]: DEBUG nova.compute.provider_tree [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 967.470706] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 967.470892] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 967.756102] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 967.756695] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca6d82bb-4ab3-4eac-9a7a-19d190a68b8e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.763953] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 967.763953] env[61629]: value = "task-1354449" [ 967.763953] env[61629]: _type = "Task" [ 967.763953] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.772778] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354449, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.894962] env[61629]: DEBUG nova.scheduler.client.report [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 967.978268] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 967.978268] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Starting heal instance info cache {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 968.161564] env[61629]: DEBUG oslo_concurrency.lockutils [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Acquiring lock "87172592-f557-467f-ace2-805fd822681d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.162023] env[61629]: DEBUG oslo_concurrency.lockutils [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Lock "87172592-f557-467f-ace2-805fd822681d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.162855] env[61629]: DEBUG oslo_concurrency.lockutils [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Acquiring lock "87172592-f557-467f-ace2-805fd822681d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.163492] env[61629]: DEBUG oslo_concurrency.lockutils [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Lock "87172592-f557-467f-ace2-805fd822681d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.164056] env[61629]: DEBUG oslo_concurrency.lockutils [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Lock "87172592-f557-467f-ace2-805fd822681d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s 
{{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.168788] env[61629]: INFO nova.compute.manager [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Terminating instance [ 968.175418] env[61629]: DEBUG nova.compute.manager [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 968.175418] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 968.175418] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c368e450-7469-463d-baba-c9682a51f6b4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.183289] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 968.183734] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e1b009f9-198e-4c46-9224-51b0e6d3b7ba {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.190654] env[61629]: DEBUG oslo_vmware.api [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Waiting for the task: (returnval){ [ 968.190654] env[61629]: value = "task-1354450" [ 968.190654] env[61629]: _type = "Task" [ 968.190654] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.200571] env[61629]: DEBUG oslo_vmware.api [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Task: {'id': task-1354450, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.274487] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354449, 'name': PowerOffVM_Task, 'duration_secs': 0.167199} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.274743] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 968.275683] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8265f557-e380-49eb-b162-41aad3cbf2e2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.295603] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4e5422-f1f7-4e07-b85a-25488a6dbc08 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.400976] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.256s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.401552] env[61629]: DEBUG nova.compute.manager [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 968.404867] env[61629]: DEBUG oslo_concurrency.lockutils [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.954s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.404867] env[61629]: DEBUG nova.objects.instance [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lazy-loading 'resources' on Instance uuid 274e3437-eacd-4299-9c27-97bbb0ebf1c1 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 968.511956] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Acquiring lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.512207] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Acquired lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.512420] env[61629]: DEBUG nova.network.neutron [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Forcefully refreshing network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 968.701900] env[61629]: DEBUG oslo_vmware.api [None 
req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Task: {'id': task-1354450, 'name': PowerOffVM_Task, 'duration_secs': 0.198821} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.702332] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 968.703095] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 968.703095] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dde9c86a-e355-4139-8af2-1db47597ec33 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.771029] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 968.771397] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 968.771536] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Deleting the datastore file [datastore2] 87172592-f557-467f-ace2-805fd822681d {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 968.771826] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9b400770-7a69-48ea-8737-5c5711a399d7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.779589] env[61629]: DEBUG oslo_vmware.api [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Waiting for the task: (returnval){ [ 968.779589] env[61629]: value = "task-1354452" [ 968.779589] env[61629]: _type = "Task" [ 968.779589] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.788233] env[61629]: DEBUG oslo_vmware.api [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Task: {'id': task-1354452, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.806513] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Creating Snapshot of the VM instance {{(pid=61629) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 968.806864] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f0315872-1d88-4b35-9920-20df27152ad4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.813858] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 968.813858] env[61629]: value = "task-1354453" [ 968.813858] env[61629]: _type = "Task" [ 968.813858] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.822596] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354453, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.829959] env[61629]: DEBUG oslo_concurrency.lockutils [None req-504c321d-8e2a-42c4-b01a-7b857da8940a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "459c5f25-8fb1-4e43-8f7f-359a7ff697f2" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.830425] env[61629]: DEBUG oslo_concurrency.lockutils [None req-504c321d-8e2a-42c4-b01a-7b857da8940a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "459c5f25-8fb1-4e43-8f7f-359a7ff697f2" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.907352] env[61629]: DEBUG nova.compute.utils [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 968.911959] env[61629]: DEBUG nova.compute.manager [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 968.912212] env[61629]: DEBUG nova.network.neutron [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 968.976539] env[61629]: DEBUG nova.policy [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be81178f7a914988a54581c283e2e76a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6d1f876ee054beb89ca0eb0776ddcd5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 969.104599] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f05882ca-037f-486b-99c8-41064ef73b60 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.113022] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a1f5ad0-4a93-4cf3-ac5a-accf85aa16fa {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.146772] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90006149-5c7a-46ec-be63-bfabbfcb8ef4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.155103] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-892898ea-bc32-401a-a2f3-4399b2c7b490 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.169381] env[61629]: DEBUG nova.compute.provider_tree [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 969.289223] env[61629]: DEBUG oslo_vmware.api [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Task: {'id': task-1354452, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171881} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.289443] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 969.289635] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 969.289818] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 969.289995] env[61629]: INFO nova.compute.manager [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] [instance: 87172592-f557-467f-ace2-805fd822681d] Took 1.12 seconds to destroy the instance on the hypervisor. [ 969.290541] env[61629]: DEBUG oslo.service.loopingcall [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 969.290541] env[61629]: DEBUG nova.compute.manager [-] [instance: 87172592-f557-467f-ace2-805fd822681d] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 969.290541] env[61629]: DEBUG nova.network.neutron [-] [instance: 87172592-f557-467f-ace2-805fd822681d] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 969.324113] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354453, 'name': CreateSnapshot_Task, 'duration_secs': 0.414192} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.324506] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Created Snapshot of the VM instance {{(pid=61629) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 969.325326] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392b54b5-0d0b-455a-b1fc-7f68b94a577e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.333441] env[61629]: DEBUG nova.compute.utils [None req-504c321d-8e2a-42c4-b01a-7b857da8940a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 969.369112] env[61629]: DEBUG nova.network.neutron [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Successfully created port: d3b1da99-eb4d-4c80-adfc-2aa87a264e1e {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 969.413913] env[61629]: DEBUG nova.compute.manager [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 969.673939] env[61629]: DEBUG nova.scheduler.client.report [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 969.842373] env[61629]: DEBUG oslo_concurrency.lockutils [None req-504c321d-8e2a-42c4-b01a-7b857da8940a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "459c5f25-8fb1-4e43-8f7f-359a7ff697f2" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.848212] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Creating linked-clone VM from snapshot {{(pid=61629) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 969.848746] env[61629]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.CloneVM_Task with opID=oslo.vmware-a9e3f076-6f7d-4bd8-9dd9-d20eef326aaf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.858642] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 969.858642] env[61629]: value = "task-1354454" [ 969.858642] env[61629]: _type = "Task" [ 969.858642] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.867132] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354454, 'name': CloneVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.962344] env[61629]: DEBUG nova.compute.manager [req-99edd974-9cbd-4143-b78f-7267b5d75fae req-28d6dd64-804b-42c1-87cf-09b09c7bf04f service nova] [instance: 87172592-f557-467f-ace2-805fd822681d] Received event network-vif-deleted-5edebde4-4d2e-4f37-b5c1-b7edf504e2d9 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 969.962344] env[61629]: INFO nova.compute.manager [req-99edd974-9cbd-4143-b78f-7267b5d75fae req-28d6dd64-804b-42c1-87cf-09b09c7bf04f service nova] [instance: 87172592-f557-467f-ace2-805fd822681d] Neutron deleted interface 5edebde4-4d2e-4f37-b5c1-b7edf504e2d9; detaching it from the instance and deleting it from the info cache [ 969.962344] env[61629]: DEBUG nova.network.neutron [req-99edd974-9cbd-4143-b78f-7267b5d75fae req-28d6dd64-804b-42c1-87cf-09b09c7bf04f service nova] [instance: 87172592-f557-467f-ace2-805fd822681d] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.172878] env[61629]: DEBUG nova.network.neutron [-] [instance: 87172592-f557-467f-ace2-805fd822681d] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.184314] env[61629]: DEBUG oslo_concurrency.lockutils [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.777s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.184314] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.585s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.185797] env[61629]: INFO nova.compute.claims [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 970.212203] env[61629]: INFO nova.scheduler.client.report [None req-539a319c-2137-4540-ac7d-3217de3c995b 
tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Deleted allocations for instance 274e3437-eacd-4299-9c27-97bbb0ebf1c1 [ 970.254581] env[61629]: DEBUG nova.network.neutron [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Updating instance_info_cache with network_info: [{"id": "b8a895f7-ad9d-4d49-8460-de82459d88f7", "address": "fa:16:3e:7d:7e:9a", "network": {"id": "7ab21805-1836-4ac0-94d2-d715f9f3352e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1256584900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc5fe81fb0eb4820825cc8e97b8fe4f2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8a895f7-ad", "ovs_interfaceid": "b8a895f7-ad9d-4d49-8460-de82459d88f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.369970] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354454, 'name': CloneVM_Task} progress is 94%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.429868] env[61629]: DEBUG nova.compute.manager [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 970.459513] env[61629]: DEBUG nova.virt.hardware [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 970.459749] env[61629]: DEBUG nova.virt.hardware [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 970.459911] env[61629]: DEBUG nova.virt.hardware [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 970.460112] env[61629]: DEBUG nova.virt.hardware [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 970.460264] env[61629]: DEBUG nova.virt.hardware [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 970.460413] env[61629]: DEBUG nova.virt.hardware [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 970.460622] env[61629]: DEBUG nova.virt.hardware [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 970.460785] env[61629]: DEBUG nova.virt.hardware [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 970.460995] 
env[61629]: DEBUG nova.virt.hardware [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 970.461157] env[61629]: DEBUG nova.virt.hardware [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 970.461300] env[61629]: DEBUG nova.virt.hardware [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 970.462190] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8353d8fe-c715-444d-a351-b23d6c49169c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.469624] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-66520536-e8e7-4aa6-bbd4-1833a61ffac5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.472755] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973afe42-8677-4f33-941e-1a4a5fb63d4c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.491426] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed261248-a42c-4074-99ec-270ca39a3ddc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.517946] env[61629]: DEBUG nova.compute.manager [req-99edd974-9cbd-4143-b78f-7267b5d75fae req-28d6dd64-804b-42c1-87cf-09b09c7bf04f service nova] [instance: 87172592-f557-467f-ace2-805fd822681d] Detach interface failed, port_id=5edebde4-4d2e-4f37-b5c1-b7edf504e2d9, reason: Instance 87172592-f557-467f-ace2-805fd822681d could not be found. {{(pid=61629) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 970.678432] env[61629]: INFO nova.compute.manager [-] [instance: 87172592-f557-467f-ace2-805fd822681d] Took 1.39 seconds to deallocate network for instance. 
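The "Acquiring lock ... by ..." / "acquired ... waited" / '"released" ... held' DEBUG lines throughout this log come from oslo.concurrency's lockutils: entries attributed to "inner" are emitted by the wrapper that the synchronized decorator puts around the target function, while entries attributed to "lock" come from the context-manager form. A minimal sketch of both patterns follows, assuming the standard lockutils API; the lock names and guarded functions are illustrative placeholders, not Nova code from this run.

    # Sketch only: reproduces the locking pattern behind the 'Acquiring lock' /
    # 'acquired' / '"released"' DEBUG lines above, not Nova's implementation.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Runs only while the in-process "compute_resources" lock is held; the
        # decorator's wrapper logs the acquire/wait/release timings around it.
        pass

    def do_terminate_instance(instance_uuid):
        # Equivalent explicit form: a per-instance lock taken as a context manager.
        with lockutils.lock(instance_uuid):
            pass

    update_usage()
    do_terminate_instance('00000000-0000-0000-0000-000000000000')  # placeholder UUID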
[ 970.723865] env[61629]: DEBUG oslo_concurrency.lockutils [None req-539a319c-2137-4540-ac7d-3217de3c995b tempest-AttachVolumeNegativeTest-338529547 tempest-AttachVolumeNegativeTest-338529547-project-member] Lock "274e3437-eacd-4299-9c27-97bbb0ebf1c1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.635s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.756728] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Releasing lock "refresh_cache-fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.757121] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Updated the network info_cache for instance {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 970.757356] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.758389] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.759243] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.759486] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.759696] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.759863] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.759995] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61629) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 970.760287] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager.update_available_resource {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.872575] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354454, 'name': CloneVM_Task} progress is 94%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.940888] env[61629]: DEBUG oslo_concurrency.lockutils [None req-504c321d-8e2a-42c4-b01a-7b857da8940a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "459c5f25-8fb1-4e43-8f7f-359a7ff697f2" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.941209] env[61629]: DEBUG oslo_concurrency.lockutils [None req-504c321d-8e2a-42c4-b01a-7b857da8940a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "459c5f25-8fb1-4e43-8f7f-359a7ff697f2" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.941494] env[61629]: INFO nova.compute.manager [None req-504c321d-8e2a-42c4-b01a-7b857da8940a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Attaching volume 8806a247-abb8-469b-9cc7-f3e68e3d20de to /dev/sdb [ 970.978051] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-def4eab7-fa60-41ab-9ebc-123adedff007 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.985304] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c94f46c-625d-4d0f-9192-c693ac4adf0a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.999180] env[61629]: DEBUG nova.virt.block_device [None req-504c321d-8e2a-42c4-b01a-7b857da8940a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Updating existing volume attachment record: 4047622d-9b34-41d8-be1b-cfe2c0484ab6 {{(pid=61629) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 971.185304] env[61629]: DEBUG oslo_concurrency.lockutils [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.264854] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.378974] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354454, 'name': CloneVM_Task} progress is 100%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.434535] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-768e56cd-ff5a-4dd9-ae4f-a2ceb93c680a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.442920] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc0b4bcf-7870-44f9-84ab-e65ad2814312 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.479030] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d35bf5d-8522-46cf-9967-9b6d583da3d1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.486876] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebe8743-9fc6-45e3-946a-78c2838691af {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.502362] env[61629]: DEBUG nova.compute.provider_tree [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 971.619214] env[61629]: DEBUG nova.compute.manager [req-7d2d6867-a4a0-4676-bf10-5b3d800485bd req-69620f81-fb6a-42ad-abca-e961b719f584 service nova] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Received event network-vif-plugged-d3b1da99-eb4d-4c80-adfc-2aa87a264e1e {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 971.619214] env[61629]: DEBUG oslo_concurrency.lockutils [req-7d2d6867-a4a0-4676-bf10-5b3d800485bd req-69620f81-fb6a-42ad-abca-e961b719f584 service nova] Acquiring lock "3994458a-195a-478c-b6d0-d8e36df989a3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.619661] env[61629]: DEBUG oslo_concurrency.lockutils [req-7d2d6867-a4a0-4676-bf10-5b3d800485bd req-69620f81-fb6a-42ad-abca-e961b719f584 service nova] Lock "3994458a-195a-478c-b6d0-d8e36df989a3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.620183] env[61629]: DEBUG oslo_concurrency.lockutils [req-7d2d6867-a4a0-4676-bf10-5b3d800485bd req-69620f81-fb6a-42ad-abca-e961b719f584 service nova] Lock "3994458a-195a-478c-b6d0-d8e36df989a3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61629) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.620935] env[61629]: DEBUG nova.compute.manager [req-7d2d6867-a4a0-4676-bf10-5b3d800485bd req-69620f81-fb6a-42ad-abca-e961b719f584 service nova] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] No waiting events found dispatching network-vif-plugged-d3b1da99-eb4d-4c80-adfc-2aa87a264e1e {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 971.621770] env[61629]: WARNING nova.compute.manager [req-7d2d6867-a4a0-4676-bf10-5b3d800485bd req-69620f81-fb6a-42ad-abca-e961b719f584 service nova] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Received unexpected event network-vif-plugged-d3b1da99-eb4d-4c80-adfc-2aa87a264e1e for instance with vm_state building and task_state spawning. [ 971.878677] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354454, 'name': CloneVM_Task, 'duration_secs': 1.52649} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.880291] env[61629]: DEBUG nova.network.neutron [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Successfully updated port: d3b1da99-eb4d-4c80-adfc-2aa87a264e1e {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 971.882371] env[61629]: INFO nova.virt.vmwareapi.vmops [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Created linked-clone VM from snapshot [ 971.884271] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4954c60-67f8-4384-9f77-5a82f78fa542 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.902855] env[61629]: DEBUG nova.virt.vmwareapi.images [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Uploading image 6a4249c3-b2ae-449d-a87e-08a19b15fa29 {{(pid=61629) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 971.943274] env[61629]: DEBUG oslo_vmware.rw_handles [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 971.943274] env[61629]: value = "vm-288561" [ 971.943274] env[61629]: _type = "VirtualMachine" [ 971.943274] env[61629]: }. 
{{(pid=61629) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 971.943915] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e0e553db-b86d-4bd8-a98d-0c0243f0a79f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.951199] env[61629]: DEBUG oslo_vmware.rw_handles [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lease: (returnval){ [ 971.951199] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52e564b8-8274-109f-4468-90f74a7dbf8c" [ 971.951199] env[61629]: _type = "HttpNfcLease" [ 971.951199] env[61629]: } obtained for exporting VM: (result){ [ 971.951199] env[61629]: value = "vm-288561" [ 971.951199] env[61629]: _type = "VirtualMachine" [ 971.951199] env[61629]: }. {{(pid=61629) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 971.951547] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the lease: (returnval){ [ 971.951547] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52e564b8-8274-109f-4468-90f74a7dbf8c" [ 971.951547] env[61629]: _type = "HttpNfcLease" [ 971.951547] env[61629]: } to be ready. {{(pid=61629) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 971.958331] env[61629]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 971.958331] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52e564b8-8274-109f-4468-90f74a7dbf8c" [ 971.958331] env[61629]: _type = "HttpNfcLease" [ 971.958331] env[61629]: } is initializing. 
{{(pid=61629) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 972.007572] env[61629]: DEBUG nova.scheduler.client.report [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 972.383118] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "refresh_cache-3994458a-195a-478c-b6d0-d8e36df989a3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.384026] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquired lock "refresh_cache-3994458a-195a-478c-b6d0-d8e36df989a3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.384026] env[61629]: DEBUG nova.network.neutron [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 972.462023] env[61629]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 972.462023] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52e564b8-8274-109f-4468-90f74a7dbf8c" [ 972.462023] env[61629]: _type = "HttpNfcLease" [ 972.462023] env[61629]: } is ready. {{(pid=61629) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 972.462023] env[61629]: DEBUG oslo_vmware.rw_handles [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 972.462023] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52e564b8-8274-109f-4468-90f74a7dbf8c" [ 972.462023] env[61629]: _type = "HttpNfcLease" [ 972.462023] env[61629]: }. {{(pid=61629) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 972.462023] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad6395be-5ccc-404a-b143-df2c6950216a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.470533] env[61629]: DEBUG oslo_vmware.rw_handles [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521ca043-2ea7-59f3-2189-848fc7666617/disk-0.vmdk from lease info. 
{{(pid=61629) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 972.471308] env[61629]: DEBUG oslo_vmware.rw_handles [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521ca043-2ea7-59f3-2189-848fc7666617/disk-0.vmdk for reading. {{(pid=61629) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 972.531321] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.348s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.531975] env[61629]: DEBUG nova.compute.manager [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 972.539418] env[61629]: DEBUG oslo_concurrency.lockutils [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.514s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.539620] env[61629]: DEBUG oslo_concurrency.lockutils [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.546602] env[61629]: DEBUG oslo_concurrency.lockutils [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.319s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.546602] env[61629]: DEBUG nova.objects.instance [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lazy-loading 'resources' on Instance uuid 7c3e9d0f-88a8-41fe-bf61-e3db34d36928 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.574934] env[61629]: INFO nova.scheduler.client.report [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Deleted allocations for instance 7cf87381-235e-449b-8269-61c2d4033028 [ 972.576082] env[61629]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-dc0719cc-b803-4a81-bac0-95539ce0e1ef {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.927184] 
env[61629]: DEBUG nova.network.neutron [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 973.048561] env[61629]: DEBUG nova.compute.utils [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 973.053902] env[61629]: DEBUG nova.compute.manager [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 973.054794] env[61629]: DEBUG nova.network.neutron [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 973.089447] env[61629]: DEBUG oslo_concurrency.lockutils [None req-22954f5d-108a-4e25-8a6f-a3270bbb2be6 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "7cf87381-235e-449b-8269-61c2d4033028" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.049s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.148932] env[61629]: DEBUG nova.policy [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c62f9a7c8b5f4ef985880339407b46a1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0578ce75c37942d4ba6c8b862ceb7d92', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 973.204321] env[61629]: DEBUG nova.network.neutron [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Updating instance_info_cache with network_info: [{"id": "d3b1da99-eb4d-4c80-adfc-2aa87a264e1e", "address": "fa:16:3e:31:2c:82", "network": {"id": "534e08bb-ebea-429f-8a3d-733c418ea99b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1143213928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6d1f876ee054beb89ca0eb0776ddcd5", "mtu": 8950, "physical_network": "default", "tunneled": 
false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3b1da99-eb", "ovs_interfaceid": "d3b1da99-eb4d-4c80-adfc-2aa87a264e1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.269665] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a51880a-86e1-4fc2-b000-a7470ccaf91e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.278262] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db8756a-e29d-4436-a1d4-8580985c2718 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.317560] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a23c4d8-08a5-462b-8f3e-4bbb2486231f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.325627] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0484732e-49d6-4d9b-b258-71ab49f46505 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.341860] env[61629]: DEBUG nova.compute.provider_tree [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 973.560909] env[61629]: DEBUG nova.compute.manager [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 973.581808] env[61629]: DEBUG nova.network.neutron [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Successfully created port: 5cbaf922-48cc-4c43-94b0-e00c9c88c48f {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 973.690950] env[61629]: DEBUG nova.compute.manager [req-29553573-8cb8-42df-8ebe-a17700e466b2 req-4a60ab53-4b6b-4b87-9d5c-ab3e9ae2e9d3 service nova] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Received event network-changed-d3b1da99-eb4d-4c80-adfc-2aa87a264e1e {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 973.690950] env[61629]: DEBUG nova.compute.manager [req-29553573-8cb8-42df-8ebe-a17700e466b2 req-4a60ab53-4b6b-4b87-9d5c-ab3e9ae2e9d3 service nova] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Refreshing instance network info cache due to event network-changed-d3b1da99-eb4d-4c80-adfc-2aa87a264e1e. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 973.690950] env[61629]: DEBUG oslo_concurrency.lockutils [req-29553573-8cb8-42df-8ebe-a17700e466b2 req-4a60ab53-4b6b-4b87-9d5c-ab3e9ae2e9d3 service nova] Acquiring lock "refresh_cache-3994458a-195a-478c-b6d0-d8e36df989a3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.707804] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Releasing lock "refresh_cache-3994458a-195a-478c-b6d0-d8e36df989a3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.708674] env[61629]: DEBUG nova.compute.manager [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Instance network_info: |[{"id": "d3b1da99-eb4d-4c80-adfc-2aa87a264e1e", "address": "fa:16:3e:31:2c:82", "network": {"id": "534e08bb-ebea-429f-8a3d-733c418ea99b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1143213928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6d1f876ee054beb89ca0eb0776ddcd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3b1da99-eb", "ovs_interfaceid": "d3b1da99-eb4d-4c80-adfc-2aa87a264e1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 973.710507] env[61629]: DEBUG oslo_concurrency.lockutils [req-29553573-8cb8-42df-8ebe-a17700e466b2 req-4a60ab53-4b6b-4b87-9d5c-ab3e9ae2e9d3 service nova] Acquired lock "refresh_cache-3994458a-195a-478c-b6d0-d8e36df989a3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.711147] env[61629]: DEBUG nova.network.neutron [req-29553573-8cb8-42df-8ebe-a17700e466b2 req-4a60ab53-4b6b-4b87-9d5c-ab3e9ae2e9d3 service nova] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Refreshing network info cache for port d3b1da99-eb4d-4c80-adfc-2aa87a264e1e {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 973.717257] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:2c:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98f447de-d71e-41ef-bc37-ed97b4a1f58f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'd3b1da99-eb4d-4c80-adfc-2aa87a264e1e', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 973.735377] env[61629]: DEBUG oslo.service.loopingcall [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 973.735377] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 973.735377] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7045c2b7-30f9-465f-bc4e-399e870374be {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.759531] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 973.759531] env[61629]: value = "task-1354461" [ 973.759531] env[61629]: _type = "Task" [ 973.759531] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.769462] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354461, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.847040] env[61629]: DEBUG nova.scheduler.client.report [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 974.268998] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354461, 'name': CreateVM_Task, 'duration_secs': 0.419344} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.269346] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 974.270152] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.270477] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.270913] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 974.271257] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-558c4cdc-2c5f-47dd-8353-4fb79e871d80 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.279716] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 974.279716] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5269f9dc-82de-eaac-f8f8-d07e2bf44ae0" [ 974.279716] env[61629]: _type = "Task" [ 974.279716] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.292840] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5269f9dc-82de-eaac-f8f8-d07e2bf44ae0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.351504] env[61629]: DEBUG oslo_concurrency.lockutils [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.806s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.354141] env[61629]: DEBUG oslo_concurrency.lockutils [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.169s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.354454] env[61629]: DEBUG nova.objects.instance [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Lazy-loading 'resources' on Instance uuid 87172592-f557-467f-ace2-805fd822681d {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 974.373615] env[61629]: INFO nova.scheduler.client.report [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Deleted allocations for instance 7c3e9d0f-88a8-41fe-bf61-e3db34d36928 [ 974.377160] env[61629]: DEBUG oslo_concurrency.lockutils [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Acquiring lock "0daebf05-e42b-49c5-aa24-43304a1c3cc0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.377160] env[61629]: DEBUG oslo_concurrency.lockutils [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Lock "0daebf05-e42b-49c5-aa24-43304a1c3cc0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.467062] env[61629]: DEBUG nova.network.neutron [req-29553573-8cb8-42df-8ebe-a17700e466b2 req-4a60ab53-4b6b-4b87-9d5c-ab3e9ae2e9d3 service nova] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Updated VIF entry in instance network info cache for port d3b1da99-eb4d-4c80-adfc-2aa87a264e1e. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 974.467446] env[61629]: DEBUG nova.network.neutron [req-29553573-8cb8-42df-8ebe-a17700e466b2 req-4a60ab53-4b6b-4b87-9d5c-ab3e9ae2e9d3 service nova] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Updating instance_info_cache with network_info: [{"id": "d3b1da99-eb4d-4c80-adfc-2aa87a264e1e", "address": "fa:16:3e:31:2c:82", "network": {"id": "534e08bb-ebea-429f-8a3d-733c418ea99b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1143213928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6d1f876ee054beb89ca0eb0776ddcd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3b1da99-eb", "ovs_interfaceid": "d3b1da99-eb4d-4c80-adfc-2aa87a264e1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.570034] env[61629]: DEBUG nova.compute.manager [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 974.597594] env[61629]: DEBUG nova.virt.hardware [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 974.597900] env[61629]: DEBUG nova.virt.hardware [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 974.598020] env[61629]: DEBUG nova.virt.hardware [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 974.598254] env[61629]: DEBUG nova.virt.hardware [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 974.598428] env[61629]: DEBUG nova.virt.hardware [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 974.598600] env[61629]: DEBUG nova.virt.hardware [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 974.598849] env[61629]: DEBUG nova.virt.hardware [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 974.599023] env[61629]: DEBUG nova.virt.hardware [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 974.599237] env[61629]: DEBUG nova.virt.hardware [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 
tempest-ServersTestJSON-1460186850-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 974.599428] env[61629]: DEBUG nova.virt.hardware [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 974.599637] env[61629]: DEBUG nova.virt.hardware [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 974.600530] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf2e363c-c6ce-4aa7-a532-3f00db7834d0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.609752] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b4c6d1-0de6-4826-9369-f8be72807801 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.790667] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5269f9dc-82de-eaac-f8f8-d07e2bf44ae0, 'name': SearchDatastore_Task, 'duration_secs': 0.012812} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.790976] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.791237] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 974.791487] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.791626] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.791898] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 974.792666] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-430911f5-3b23-4bc1-88e2-55b5a7c63e9c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.801160] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 974.801365] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 974.802188] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-833956f7-52b7-48bc-ba3d-53616be89bc3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.807384] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 974.807384] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]520b96ee-654f-2014-d3e8-3db819bd2361" [ 974.807384] env[61629]: _type = "Task" [ 974.807384] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.815089] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]520b96ee-654f-2014-d3e8-3db819bd2361, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.882750] env[61629]: DEBUG nova.compute.manager [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Starting instance... 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 974.889026] env[61629]: DEBUG oslo_concurrency.lockutils [None req-931625d2-da30-456e-a8c5-a8fc8c1e0179 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "7c3e9d0f-88a8-41fe-bf61-e3db34d36928" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.133s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.970421] env[61629]: DEBUG oslo_concurrency.lockutils [req-29553573-8cb8-42df-8ebe-a17700e466b2 req-4a60ab53-4b6b-4b87-9d5c-ab3e9ae2e9d3 service nova] Releasing lock "refresh_cache-3994458a-195a-478c-b6d0-d8e36df989a3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.019896] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b28d463-18bb-4576-84ef-c6b222048085 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.027764] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ec7415-4ed2-4677-813b-53c0c20e722d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.060100] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bab578d-aab2-4751-a383-1577636c4402 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.067821] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59d1910d-6d8c-4653-a0e6-76a0dae6f060 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.081539] env[61629]: DEBUG nova.compute.provider_tree [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.322285] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]520b96ee-654f-2014-d3e8-3db819bd2361, 'name': SearchDatastore_Task, 'duration_secs': 0.030435} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.323173] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3620c18-0232-4cbe-aa8d-150aa527e92e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.331836] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 975.331836] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]527977da-c02f-5f5e-bc5e-840d9287a47c" [ 975.331836] env[61629]: _type = "Task" [ 975.331836] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.337640] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]527977da-c02f-5f5e-bc5e-840d9287a47c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.405396] env[61629]: DEBUG oslo_concurrency.lockutils [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.419270] env[61629]: DEBUG oslo_concurrency.lockutils [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "09890839-b1d9-4558-992d-b1a6f4c5f750" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.419624] env[61629]: DEBUG oslo_concurrency.lockutils [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "09890839-b1d9-4558-992d-b1a6f4c5f750" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.419860] env[61629]: DEBUG oslo_concurrency.lockutils [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "09890839-b1d9-4558-992d-b1a6f4c5f750-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.420952] env[61629]: DEBUG oslo_concurrency.lockutils [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "09890839-b1d9-4558-992d-b1a6f4c5f750-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.420952] env[61629]: DEBUG oslo_concurrency.lockutils [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "09890839-b1d9-4558-992d-b1a6f4c5f750-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.422520] env[61629]: INFO nova.compute.manager [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Terminating instance [ 
975.424579] env[61629]: DEBUG nova.compute.manager [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 975.425859] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 975.425859] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a5e3d3-0c8d-45d0-84bd-13017ed1f7ba {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.434387] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 975.434387] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ec76507d-7919-42f7-8bda-f06d69c0fb71 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.437555] env[61629]: DEBUG nova.network.neutron [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Successfully updated port: 5cbaf922-48cc-4c43-94b0-e00c9c88c48f {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 975.440835] env[61629]: DEBUG oslo_vmware.api [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 975.440835] env[61629]: value = "task-1354462" [ 975.440835] env[61629]: _type = "Task" [ 975.440835] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.450092] env[61629]: DEBUG oslo_vmware.api [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354462, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.546905] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-504c321d-8e2a-42c4-b01a-7b857da8940a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Volume attach. 
Driver type: vmdk {{(pid=61629) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 975.547253] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-504c321d-8e2a-42c4-b01a-7b857da8940a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-288563', 'volume_id': '8806a247-abb8-469b-9cc7-f3e68e3d20de', 'name': 'volume-8806a247-abb8-469b-9cc7-f3e68e3d20de', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '459c5f25-8fb1-4e43-8f7f-359a7ff697f2', 'attached_at': '', 'detached_at': '', 'volume_id': '8806a247-abb8-469b-9cc7-f3e68e3d20de', 'serial': '8806a247-abb8-469b-9cc7-f3e68e3d20de'} {{(pid=61629) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 975.548313] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80cf05e5-684d-498e-9bfc-1a1adbe374cc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.569545] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4b2987-3bae-4796-9839-da2a64de549e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.590026] env[61629]: DEBUG nova.scheduler.client.report [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 975.602628] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-504c321d-8e2a-42c4-b01a-7b857da8940a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] volume-8806a247-abb8-469b-9cc7-f3e68e3d20de/volume-8806a247-abb8-469b-9cc7-f3e68e3d20de.vmdk or device None with type thin {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 975.603453] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a8ffedb-e8d9-4de5-bb78-e0f3e1951660 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.624849] env[61629]: DEBUG oslo_vmware.api [None req-504c321d-8e2a-42c4-b01a-7b857da8940a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 975.624849] env[61629]: value = "task-1354463" [ 975.624849] env[61629]: _type = "Task" [ 975.624849] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.634900] env[61629]: DEBUG oslo_vmware.api [None req-504c321d-8e2a-42c4-b01a-7b857da8940a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354463, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.846161] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]527977da-c02f-5f5e-bc5e-840d9287a47c, 'name': SearchDatastore_Task, 'duration_secs': 0.034755} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.846550] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.846827] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 3994458a-195a-478c-b6d0-d8e36df989a3/3994458a-195a-478c-b6d0-d8e36df989a3.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 975.847176] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-636512b5-9bbe-4e3b-b421-317033ee71cd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.859437] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 975.859437] env[61629]: value = "task-1354464" [ 975.859437] env[61629]: _type = "Task" [ 975.859437] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.868147] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354464, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.940762] env[61629]: DEBUG nova.compute.manager [req-8c1dab84-7381-4cbb-9dd6-edd97ab1deaf req-30f4d959-3dab-4211-9288-c15c7ed3fc46 service nova] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Received event network-vif-plugged-5cbaf922-48cc-4c43-94b0-e00c9c88c48f {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 975.940971] env[61629]: DEBUG oslo_concurrency.lockutils [req-8c1dab84-7381-4cbb-9dd6-edd97ab1deaf req-30f4d959-3dab-4211-9288-c15c7ed3fc46 service nova] Acquiring lock "22f71f92-ca9a-4b97-a652-3f34a0dabde2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.941335] env[61629]: DEBUG oslo_concurrency.lockutils [req-8c1dab84-7381-4cbb-9dd6-edd97ab1deaf req-30f4d959-3dab-4211-9288-c15c7ed3fc46 service nova] Lock "22f71f92-ca9a-4b97-a652-3f34a0dabde2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.941571] env[61629]: DEBUG oslo_concurrency.lockutils [req-8c1dab84-7381-4cbb-9dd6-edd97ab1deaf req-30f4d959-3dab-4211-9288-c15c7ed3fc46 service nova] Lock "22f71f92-ca9a-4b97-a652-3f34a0dabde2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.941824] env[61629]: DEBUG nova.compute.manager [req-8c1dab84-7381-4cbb-9dd6-edd97ab1deaf req-30f4d959-3dab-4211-9288-c15c7ed3fc46 service nova] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] No waiting events found dispatching network-vif-plugged-5cbaf922-48cc-4c43-94b0-e00c9c88c48f {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 975.942070] env[61629]: WARNING nova.compute.manager [req-8c1dab84-7381-4cbb-9dd6-edd97ab1deaf req-30f4d959-3dab-4211-9288-c15c7ed3fc46 service nova] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Received unexpected event network-vif-plugged-5cbaf922-48cc-4c43-94b0-e00c9c88c48f for instance with vm_state building and task_state spawning. [ 975.942327] env[61629]: DEBUG nova.compute.manager [req-8c1dab84-7381-4cbb-9dd6-edd97ab1deaf req-30f4d959-3dab-4211-9288-c15c7ed3fc46 service nova] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Received event network-changed-5cbaf922-48cc-4c43-94b0-e00c9c88c48f {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 975.942572] env[61629]: DEBUG nova.compute.manager [req-8c1dab84-7381-4cbb-9dd6-edd97ab1deaf req-30f4d959-3dab-4211-9288-c15c7ed3fc46 service nova] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Refreshing instance network info cache due to event network-changed-5cbaf922-48cc-4c43-94b0-e00c9c88c48f. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 975.946021] env[61629]: DEBUG oslo_concurrency.lockutils [req-8c1dab84-7381-4cbb-9dd6-edd97ab1deaf req-30f4d959-3dab-4211-9288-c15c7ed3fc46 service nova] Acquiring lock "refresh_cache-22f71f92-ca9a-4b97-a652-3f34a0dabde2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.946021] env[61629]: DEBUG oslo_concurrency.lockutils [req-8c1dab84-7381-4cbb-9dd6-edd97ab1deaf req-30f4d959-3dab-4211-9288-c15c7ed3fc46 service nova] Acquired lock "refresh_cache-22f71f92-ca9a-4b97-a652-3f34a0dabde2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.946021] env[61629]: DEBUG nova.network.neutron [req-8c1dab84-7381-4cbb-9dd6-edd97ab1deaf req-30f4d959-3dab-4211-9288-c15c7ed3fc46 service nova] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Refreshing network info cache for port 5cbaf922-48cc-4c43-94b0-e00c9c88c48f {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 975.946021] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "refresh_cache-22f71f92-ca9a-4b97-a652-3f34a0dabde2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.956436] env[61629]: DEBUG oslo_vmware.api [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354462, 'name': PowerOffVM_Task, 'duration_secs': 0.253344} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.956436] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 975.956436] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 975.956625] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-90eefaec-ee78-42aa-a399-b6255cb3a53d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.104550] env[61629]: DEBUG oslo_concurrency.lockutils [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.750s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.110759] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 4.847s {{(pid=61629) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.110953] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.111129] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61629) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 976.111467] env[61629]: DEBUG oslo_concurrency.lockutils [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.706s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.113178] env[61629]: INFO nova.compute.claims [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 976.117092] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c493b5-0f4e-4530-af60-cb5fc00807ec {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.136016] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d7087f-7560-45c0-9edb-4031926021d2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.141682] env[61629]: INFO nova.scheduler.client.report [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Deleted allocations for instance 87172592-f557-467f-ace2-805fd822681d [ 976.166614] env[61629]: DEBUG oslo_vmware.api [None req-504c321d-8e2a-42c4-b01a-7b857da8940a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354463, 'name': ReconfigVM_Task, 'duration_secs': 0.416343} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.168574] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43809f83-ccdb-4f44-ad6a-5e40e3f4ab2f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.172024] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-504c321d-8e2a-42c4-b01a-7b857da8940a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Reconfigured VM instance instance-00000058 to attach disk [datastore1] volume-8806a247-abb8-469b-9cc7-f3e68e3d20de/volume-8806a247-abb8-469b-9cc7-f3e68e3d20de.vmdk or device None with type thin {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 976.177399] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f27e8b48-6641-4787-aecc-65b23009c259 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.199399] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd64226-d152-4ede-8ce5-99ade5773a29 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.207742] env[61629]: DEBUG oslo_vmware.api [None req-504c321d-8e2a-42c4-b01a-7b857da8940a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 976.207742] env[61629]: value = "task-1354466" [ 976.207742] env[61629]: _type = "Task" [ 976.207742] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.242933] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180303MB free_disk=151GB free_vcpus=48 pci_devices=None {{(pid=61629) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 976.242933] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.249805] env[61629]: DEBUG oslo_vmware.api [None req-504c321d-8e2a-42c4-b01a-7b857da8940a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354466, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.372658] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354464, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.505125] env[61629]: DEBUG nova.network.neutron [req-8c1dab84-7381-4cbb-9dd6-edd97ab1deaf req-30f4d959-3dab-4211-9288-c15c7ed3fc46 service nova] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 976.670582] env[61629]: DEBUG oslo_concurrency.lockutils [None req-df81db42-9f6c-414e-af30-d08d2b885814 tempest-ServerTagsTestJSON-168685904 tempest-ServerTagsTestJSON-168685904-project-member] Lock "87172592-f557-467f-ace2-805fd822681d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.508s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.678842] env[61629]: DEBUG nova.network.neutron [req-8c1dab84-7381-4cbb-9dd6-edd97ab1deaf req-30f4d959-3dab-4211-9288-c15c7ed3fc46 service nova] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.718313] env[61629]: DEBUG oslo_vmware.api [None req-504c321d-8e2a-42c4-b01a-7b857da8940a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354466, 'name': ReconfigVM_Task, 'duration_secs': 0.152658} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.718710] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-504c321d-8e2a-42c4-b01a-7b857da8940a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-288563', 'volume_id': '8806a247-abb8-469b-9cc7-f3e68e3d20de', 'name': 'volume-8806a247-abb8-469b-9cc7-f3e68e3d20de', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '459c5f25-8fb1-4e43-8f7f-359a7ff697f2', 'attached_at': '', 'detached_at': '', 'volume_id': '8806a247-abb8-469b-9cc7-f3e68e3d20de', 'serial': '8806a247-abb8-469b-9cc7-f3e68e3d20de'} {{(pid=61629) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 976.871147] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354464, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.605736} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.871454] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 3994458a-195a-478c-b6d0-d8e36df989a3/3994458a-195a-478c-b6d0-d8e36df989a3.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 976.871674] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 976.871952] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1627fbcc-9fd5-4861-8748-0ddfc887d881 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.878746] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 976.878746] env[61629]: value = "task-1354467" [ 976.878746] env[61629]: _type = "Task" [ 976.878746] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.886633] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354467, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.184835] env[61629]: DEBUG oslo_concurrency.lockutils [req-8c1dab84-7381-4cbb-9dd6-edd97ab1deaf req-30f4d959-3dab-4211-9288-c15c7ed3fc46 service nova] Releasing lock "refresh_cache-22f71f92-ca9a-4b97-a652-3f34a0dabde2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 977.189566] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired lock "refresh_cache-22f71f92-ca9a-4b97-a652-3f34a0dabde2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.189742] env[61629]: DEBUG nova.network.neutron [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 977.306355] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 977.306589] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 977.306772] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Deleting the datastore file [datastore1] 09890839-b1d9-4558-992d-b1a6f4c5f750 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 977.307065] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d8379978-c0b5-4e72-931f-c151be7c3bb1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.315638] env[61629]: DEBUG oslo_vmware.api [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for the task: (returnval){ [ 977.315638] env[61629]: value = "task-1354468" [ 977.315638] env[61629]: _type = "Task" [ 977.315638] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.363218] env[61629]: DEBUG oslo_vmware.api [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354468, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.363218] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de06323-f274-4390-84ec-82fec5176187 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.363218] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3be102-f773-4179-9c6e-c395f50a38e1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.393062] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40584e7a-4336-400d-ab3f-3b052e17ca70 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.404614] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5842d4-ea8e-4e22-982f-5bb4dc5e414a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.409220] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354467, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06705} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.409721] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 977.411347] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5888b058-765c-4058-865d-166d10fc5209 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.422873] env[61629]: DEBUG nova.compute.provider_tree [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 977.452072] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 3994458a-195a-478c-b6d0-d8e36df989a3/3994458a-195a-478c-b6d0-d8e36df989a3.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 977.452072] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0600c3a-8b08-40d6-9591-25b9781c5d27 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.473664] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 
tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 977.473664] env[61629]: value = "task-1354469" [ 977.473664] env[61629]: _type = "Task" [ 977.473664] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.483015] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354469, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.780117] env[61629]: DEBUG nova.network.neutron [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 977.781682] env[61629]: DEBUG nova.objects.instance [None req-504c321d-8e2a-42c4-b01a-7b857da8940a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lazy-loading 'flavor' on Instance uuid 459c5f25-8fb1-4e43-8f7f-359a7ff697f2 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 977.839979] env[61629]: DEBUG oslo_vmware.api [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Task: {'id': task-1354468, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.296633} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.840673] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 977.841064] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 977.841360] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 977.841684] env[61629]: INFO nova.compute.manager [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Took 2.42 seconds to destroy the instance on the hypervisor. [ 977.842118] env[61629]: DEBUG oslo.service.loopingcall [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 977.844132] env[61629]: DEBUG nova.compute.manager [-] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 977.844132] env[61629]: DEBUG nova.network.neutron [-] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 977.926852] env[61629]: DEBUG nova.scheduler.client.report [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 977.985727] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354469, 'name': ReconfigVM_Task, 'duration_secs': 0.409119} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.986101] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 3994458a-195a-478c-b6d0-d8e36df989a3/3994458a-195a-478c-b6d0-d8e36df989a3.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 977.986743] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f7dc444d-ae27-4321-956e-158fbd905935 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.993957] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 977.993957] env[61629]: value = "task-1354470" [ 977.993957] env[61629]: _type = "Task" [ 977.993957] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.003895] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354470, 'name': Rename_Task} progress is 5%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.212953] env[61629]: DEBUG nova.network.neutron [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Updating instance_info_cache with network_info: [{"id": "5cbaf922-48cc-4c43-94b0-e00c9c88c48f", "address": "fa:16:3e:68:b0:52", "network": {"id": "c1b68401-68d1-48c7-b118-722070249876", "bridge": "br-int", "label": "tempest-ServersTestJSON-1738845000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0578ce75c37942d4ba6c8b862ceb7d92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5cbaf922-48", "ovs_interfaceid": "5cbaf922-48cc-4c43-94b0-e00c9c88c48f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.290064] env[61629]: DEBUG oslo_concurrency.lockutils [None req-504c321d-8e2a-42c4-b01a-7b857da8940a tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "459c5f25-8fb1-4e43-8f7f-359a7ff697f2" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.349s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.431942] env[61629]: DEBUG nova.compute.manager [req-130d75a7-05d3-42c9-abfc-66dd3548d48f req-14dc6094-d0a6-4e4a-aeb8-55babe12466a service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Received event network-vif-deleted-91aa1640-3097-4a26-9090-4081740f917d {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 978.432189] env[61629]: INFO nova.compute.manager [req-130d75a7-05d3-42c9-abfc-66dd3548d48f req-14dc6094-d0a6-4e4a-aeb8-55babe12466a service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Neutron deleted interface 91aa1640-3097-4a26-9090-4081740f917d; detaching it from the instance and deleting it from the info cache [ 978.432864] env[61629]: DEBUG nova.network.neutron [req-130d75a7-05d3-42c9-abfc-66dd3548d48f req-14dc6094-d0a6-4e4a-aeb8-55babe12466a service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.438020] env[61629]: DEBUG oslo_concurrency.lockutils [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.324s {{(pid=61629) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.438020] env[61629]: DEBUG nova.compute.manager [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 978.439627] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.197s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.471427] env[61629]: INFO nova.compute.manager [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Rescuing [ 978.475111] env[61629]: DEBUG oslo_concurrency.lockutils [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "refresh_cache-459c5f25-8fb1-4e43-8f7f-359a7ff697f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.475111] env[61629]: DEBUG oslo_concurrency.lockutils [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquired lock "refresh_cache-459c5f25-8fb1-4e43-8f7f-359a7ff697f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.475111] env[61629]: DEBUG nova.network.neutron [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 978.506565] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354470, 'name': Rename_Task} progress is 99%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.717343] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Releasing lock "refresh_cache-22f71f92-ca9a-4b97-a652-3f34a0dabde2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 978.717689] env[61629]: DEBUG nova.compute.manager [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Instance network_info: |[{"id": "5cbaf922-48cc-4c43-94b0-e00c9c88c48f", "address": "fa:16:3e:68:b0:52", "network": {"id": "c1b68401-68d1-48c7-b118-722070249876", "bridge": "br-int", "label": "tempest-ServersTestJSON-1738845000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0578ce75c37942d4ba6c8b862ceb7d92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5cbaf922-48", "ovs_interfaceid": "5cbaf922-48cc-4c43-94b0-e00c9c88c48f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 978.718149] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:b0:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba866c99-1cb2-4588-9f76-4bc0421ed46a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5cbaf922-48cc-4c43-94b0-e00c9c88c48f', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 978.726757] env[61629]: DEBUG oslo.service.loopingcall [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 978.726757] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 978.726757] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b9438cc7-211b-43f7-99a2-409abcf5949c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.749609] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 978.749609] env[61629]: value = "task-1354471" [ 978.749609] env[61629]: _type = "Task" [ 978.749609] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.758406] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354471, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.863129] env[61629]: DEBUG nova.network.neutron [-] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.940935] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-422a1f47-0fab-4da9-9ae1-58a8bad398da {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.944296] env[61629]: DEBUG nova.compute.utils [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 978.952895] env[61629]: DEBUG nova.compute.manager [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 978.953074] env[61629]: DEBUG nova.network.neutron [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 978.971917] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a325977f-11c9-409b-a877-ebf78f301303 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.986662] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 09890839-b1d9-4558-992d-b1a6f4c5f750 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.986793] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 87a1383f-d66b-4bde-b153-89ac62ff8390 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.986915] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 0d21b352-bdd0-4887-8658-cd5c448352d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.987048] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance c5b6f6b8-587c-4b74-bc83-98dac319b15b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.987168] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 459c5f25-8fb1-4e43-8f7f-359a7ff697f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.987282] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.987391] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance a08e5762-5307-4dd8-a025-a1cdfd43025e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.987503] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 3994458a-195a-478c-b6d0-d8e36df989a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.987608] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 22f71f92-ca9a-4b97-a652-3f34a0dabde2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.987715] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 0daebf05-e42b-49c5-aa24-43304a1c3cc0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.987921] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61629) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 978.988045] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61629) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 979.004022] env[61629]: DEBUG nova.compute.manager [req-130d75a7-05d3-42c9-abfc-66dd3548d48f req-14dc6094-d0a6-4e4a-aeb8-55babe12466a service nova] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Detach interface failed, port_id=91aa1640-3097-4a26-9090-4081740f917d, reason: Instance 09890839-b1d9-4558-992d-b1a6f4c5f750 could not be found. {{(pid=61629) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 979.017326] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354470, 'name': Rename_Task} progress is 99%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.075561] env[61629]: DEBUG nova.policy [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a5ea4cfd7f404650b16626208eb8559c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2b9dfc16a6d1479883f21028bbb9dd58', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 979.158545] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ee8ac1-8225-4249-b919-6679677dac16 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.167266] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01739fd8-4e86-4051-924d-239e9d75174b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.201332] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a4197cd-5340-4559-ac77-4fe6365e7f1a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.209642] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5daf93c7-e1e6-41f5-91b7-e2676636ea30 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.225015] env[61629]: DEBUG nova.compute.provider_tree [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 979.260673] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354471, 'name': CreateVM_Task, 'duration_secs': 0.349283} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.260864] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 979.261616] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 979.261809] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.263132] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 979.263132] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cbe6bfc-b720-4068-8e72-b14997d776f2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.268520] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 979.268520] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]523f6bb6-a9d9-ce14-8aaf-f989a241d013" [ 979.268520] env[61629]: _type = "Task" [ 979.268520] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.280162] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]523f6bb6-a9d9-ce14-8aaf-f989a241d013, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.367846] env[61629]: INFO nova.compute.manager [-] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Took 1.53 seconds to deallocate network for instance. [ 979.454277] env[61629]: DEBUG nova.compute.manager [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 979.508480] env[61629]: DEBUG nova.network.neutron [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Updating instance_info_cache with network_info: [{"id": "c827ba81-d74a-4ff3-bfc2-81b5e09c683c", "address": "fa:16:3e:6d:96:2d", "network": {"id": "a1fb78c4-7c5c-4692-86e0-3111b87b44c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1355821875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87909880104e4519b42cb204f366af3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc827ba81-d7", "ovs_interfaceid": "c827ba81-d74a-4ff3-bfc2-81b5e09c683c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.515740] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354470, 'name': Rename_Task, 'duration_secs': 1.167256} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.515975] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 979.516247] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e4b1d3b8-610b-4092-ba58-9fb79f027c6d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.523024] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 979.523024] env[61629]: value = "task-1354472" [ 979.523024] env[61629]: _type = "Task" [ 979.523024] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.532598] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354472, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.634904] env[61629]: DEBUG nova.network.neutron [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Successfully created port: abf6c35f-b11c-4f9e-b605-8f889ccf39ab {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 979.732200] env[61629]: DEBUG nova.scheduler.client.report [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 979.780835] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]523f6bb6-a9d9-ce14-8aaf-f989a241d013, 'name': SearchDatastore_Task, 'duration_secs': 0.011848} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.781206] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.781482] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 979.781735] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 979.781886] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.782082] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 979.782367] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-463ea017-191f-4108-90d2-fa5c5b48434b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.794959] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 979.794959] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 979.795578] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68c58c02-08f6-49cb-ac05-51268dd8d270 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.801753] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 979.801753] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52f267b4-8e52-e8d3-3cdc-06869238febd" [ 979.801753] env[61629]: _type = "Task" [ 979.801753] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.810646] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52f267b4-8e52-e8d3-3cdc-06869238febd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.875865] env[61629]: DEBUG oslo_concurrency.lockutils [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.013045] env[61629]: DEBUG oslo_concurrency.lockutils [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Releasing lock "refresh_cache-459c5f25-8fb1-4e43-8f7f-359a7ff697f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 980.036623] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354472, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.242092] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61629) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 980.242326] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.803s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.242621] env[61629]: DEBUG oslo_concurrency.lockutils [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.367s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.242851] env[61629]: DEBUG nova.objects.instance [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lazy-loading 'resources' on Instance uuid 09890839-b1d9-4558-992d-b1a6f4c5f750 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 980.313081] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52f267b4-8e52-e8d3-3cdc-06869238febd, 'name': SearchDatastore_Task, 'duration_secs': 0.011683} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.314164] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07ae29a7-ca51-4ac3-a301-ab9d1f183456 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.319888] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 980.319888] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52695daa-1b87-f08f-1c02-426343ccdf2a" [ 980.319888] env[61629]: _type = "Task" [ 980.319888] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.327319] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52695daa-1b87-f08f-1c02-426343ccdf2a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.466020] env[61629]: DEBUG nova.compute.manager [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 980.491322] env[61629]: DEBUG nova.virt.hardware [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 980.491588] env[61629]: DEBUG nova.virt.hardware [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 980.491748] env[61629]: DEBUG nova.virt.hardware [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 980.491930] env[61629]: DEBUG nova.virt.hardware [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 980.494936] env[61629]: DEBUG nova.virt.hardware [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 980.495177] env[61629]: DEBUG nova.virt.hardware [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 980.495415] env[61629]: DEBUG nova.virt.hardware [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 980.495585] env[61629]: DEBUG nova.virt.hardware [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 980.495757] env[61629]: DEBUG nova.virt.hardware [None 
req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 980.495924] env[61629]: DEBUG nova.virt.hardware [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 980.496111] env[61629]: DEBUG nova.virt.hardware [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 980.497038] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3eb4174-db17-49d0-8cae-ce466318cacb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.505599] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d925508e-4a30-4253-9b94-7ba575794288 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.536957] env[61629]: DEBUG oslo_vmware.api [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354472, 'name': PowerOnVM_Task, 'duration_secs': 0.536161} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.537261] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 980.539021] env[61629]: INFO nova.compute.manager [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Took 10.11 seconds to spawn the instance on the hypervisor. 
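Note on the task entries above: the PowerOnVM_Task sequence (Invoking VirtualMachine.PowerOnVM_Task, "Waiting for the task", repeated "progress is N%" polls from _poll_task, then "completed successfully" with duration_secs) is oslo.vmware's standard invoke-then-wait pattern. A minimal illustrative sketch of that pattern follows; it assumes an already-constructed oslo_vmware.api.VMwareAPISession named `session` and a VM managed-object reference `vm_ref` (both placeholder names, not values taken from this log).

    from oslo_vmware import exceptions as vexc

    # Sketch only: `session` and `vm_ref` are assumed to exist already.
    try:
        # Starts the asynchronous vCenter task; corresponds to the
        # "Invoking VirtualMachine.PowerOnVM_Task" DEBUG line above.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # Polls the task until it finishes; each poll produces one
        # "Task: {...} progress is N%" line, and success yields the
        # "completed successfully" line with duration_secs.
        session.wait_for_task(task)
    except vexc.VimException:
        # vCenter task failures surface as VimException subclasses.
        raise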
[ 980.539021] env[61629]: DEBUG nova.compute.manager [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 980.539021] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d45e4bf7-bdcb-4f94-a62d-dc49b854dc25 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.592995] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.592995] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.593142] env[61629]: INFO nova.compute.manager [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Shelving [ 980.609039] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 980.610183] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-95dbcb61-8152-4a8e-baa8-ce6b977e46c7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.618358] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 980.618358] env[61629]: value = "task-1354473" [ 980.618358] env[61629]: _type = "Task" [ 980.618358] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.626973] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354473, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.833558] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52695daa-1b87-f08f-1c02-426343ccdf2a, 'name': SearchDatastore_Task, 'duration_secs': 0.052135} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.836289] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 980.836578] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 22f71f92-ca9a-4b97-a652-3f34a0dabde2/22f71f92-ca9a-4b97-a652-3f34a0dabde2.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 980.837048] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f3098813-1cb4-4a04-8689-93cc812271c0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.843698] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 980.843698] env[61629]: value = "task-1354474" [ 980.843698] env[61629]: _type = "Task" [ 980.843698] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.854221] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354474, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.895051] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a32916-2407-411f-bd1c-3f70b3bee973 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.903419] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1accab24-01c3-404e-a3c5-20177a98e6d8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.936188] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ccf2c87-ec86-41e4-ab02-3cc4fdc7c665 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.944300] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-842be1a9-5844-4ab3-8380-eb8df58f5f71 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.958370] env[61629]: DEBUG nova.compute.provider_tree [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 981.056299] env[61629]: INFO nova.compute.manager [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Took 18.23 seconds to build instance. [ 981.100360] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 981.100632] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1cb6caf-9195-47e6-b8b2-a06bb9c4ffe4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.107912] env[61629]: DEBUG oslo_vmware.api [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 981.107912] env[61629]: value = "task-1354475" [ 981.107912] env[61629]: _type = "Task" [ 981.107912] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.116614] env[61629]: DEBUG oslo_vmware.api [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354475, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.127525] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354473, 'name': PowerOffVM_Task, 'duration_secs': 0.213035} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.127804] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 981.128676] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7090e173-446c-435f-a1d7-04da3f33f283 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.151999] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c008b61a-b781-403b-9a4d-09a6b2df03e1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.169398] env[61629]: DEBUG oslo_vmware.rw_handles [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521ca043-2ea7-59f3-2189-848fc7666617/disk-0.vmdk. {{(pid=61629) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 981.170392] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2838e47-139b-41e8-bcfd-3d7397a303e7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.178550] env[61629]: DEBUG oslo_vmware.rw_handles [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521ca043-2ea7-59f3-2189-848fc7666617/disk-0.vmdk is in state: ready. {{(pid=61629) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 981.178764] env[61629]: ERROR oslo_vmware.rw_handles [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521ca043-2ea7-59f3-2189-848fc7666617/disk-0.vmdk due to incomplete transfer. 
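Note on the lock entries throughout this run: the "Acquiring lock" / "Acquired lock" / "Releasing lock" lines (e.g. for "refresh_cache-..." and the "[datastore...] devstack-image-cache_base/..." image-cache paths above and below) and the '"acquired" by ... :: waited' / '"released" by ... :: held' pairs around "compute_resources" are emitted by oslo.concurrency's lockutils. A minimal illustrative sketch of the two variants, using placeholder lock names rather than anything copied from Nova's source:

    from oslo_concurrency import lockutils

    # Context-manager form: emits the "Acquiring lock" / "Acquired lock" /
    # "Releasing lock" DEBUG lines (lockutils.py:310/313/331 in this log).
    with lockutils.lock("refresh_cache-<instance-uuid>"):  # placeholder name
        pass  # critical section guarded within this process

    # Decorator form: emits the '... "acquired" by ... :: waited Ns' and
    # '... "released" by ... :: held Ns' lines (lockutils.py:402/407/421).
    @lockutils.synchronized("compute_resources")
    def update_usage():
        pass  # only one caller per process updates resource usage at a time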
[ 981.180739] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 981.180994] env[61629]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6cac31dd-c007-4fcc-a05f-7cb14324b479 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.182608] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-642dd4e5-edd9-44c9-acfb-ac2681e3395a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.189966] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 981.189966] env[61629]: value = "task-1354476" [ 981.189966] env[61629]: _type = "Task" [ 981.189966] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.191114] env[61629]: DEBUG oslo_vmware.rw_handles [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521ca043-2ea7-59f3-2189-848fc7666617/disk-0.vmdk. {{(pid=61629) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 981.191339] env[61629]: DEBUG nova.virt.vmwareapi.images [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Uploaded image 6a4249c3-b2ae-449d-a87e-08a19b15fa29 to the Glance image server {{(pid=61629) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 981.193929] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Destroying the VM {{(pid=61629) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 981.197129] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-90106198-5dab-4d61-9825-5a95b1874cd4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.205842] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] VM already powered off {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 981.206063] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c 
{{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 981.206363] env[61629]: DEBUG oslo_concurrency.lockutils [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.206540] env[61629]: DEBUG oslo_concurrency.lockutils [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.206756] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 981.207110] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 981.207110] env[61629]: value = "task-1354477" [ 981.207110] env[61629]: _type = "Task" [ 981.207110] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.207327] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17f11bf6-f6c9-4b51-8486-745028bc4788 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.219233] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354477, 'name': Destroy_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.221178] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 981.221382] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 981.222208] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8407aca-63da-4c4c-8cdb-a67f9b4dd67e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.227560] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 981.227560] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]527b4494-afbf-8e1c-0708-902eda809c7a" [ 981.227560] env[61629]: _type = "Task" [ 981.227560] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.237984] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]527b4494-afbf-8e1c-0708-902eda809c7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.278926] env[61629]: DEBUG nova.compute.manager [req-eb8787f4-86e9-4890-9fec-3aac05a5fb36 req-ecbdcf4e-27ec-4062-95b8-33fc044e8245 service nova] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Received event network-vif-plugged-abf6c35f-b11c-4f9e-b605-8f889ccf39ab {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 981.279167] env[61629]: DEBUG oslo_concurrency.lockutils [req-eb8787f4-86e9-4890-9fec-3aac05a5fb36 req-ecbdcf4e-27ec-4062-95b8-33fc044e8245 service nova] Acquiring lock "0daebf05-e42b-49c5-aa24-43304a1c3cc0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.279461] env[61629]: DEBUG oslo_concurrency.lockutils [req-eb8787f4-86e9-4890-9fec-3aac05a5fb36 req-ecbdcf4e-27ec-4062-95b8-33fc044e8245 service nova] Lock "0daebf05-e42b-49c5-aa24-43304a1c3cc0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.279651] env[61629]: DEBUG oslo_concurrency.lockutils [req-eb8787f4-86e9-4890-9fec-3aac05a5fb36 req-ecbdcf4e-27ec-4062-95b8-33fc044e8245 service nova] Lock "0daebf05-e42b-49c5-aa24-43304a1c3cc0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.279819] env[61629]: DEBUG nova.compute.manager [req-eb8787f4-86e9-4890-9fec-3aac05a5fb36 req-ecbdcf4e-27ec-4062-95b8-33fc044e8245 service nova] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] No waiting events found dispatching network-vif-plugged-abf6c35f-b11c-4f9e-b605-8f889ccf39ab {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 981.279989] env[61629]: WARNING nova.compute.manager [req-eb8787f4-86e9-4890-9fec-3aac05a5fb36 req-ecbdcf4e-27ec-4062-95b8-33fc044e8245 service nova] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Received unexpected event 
network-vif-plugged-abf6c35f-b11c-4f9e-b605-8f889ccf39ab for instance with vm_state building and task_state spawning. [ 981.353970] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354474, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.417187] env[61629]: DEBUG nova.network.neutron [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Successfully updated port: abf6c35f-b11c-4f9e-b605-8f889ccf39ab {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 981.461922] env[61629]: DEBUG nova.scheduler.client.report [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 981.558788] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e2ad83cd-f945-4c07-b745-220410cafa00 tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "3994458a-195a-478c-b6d0-d8e36df989a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.752s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.623908] env[61629]: DEBUG oslo_vmware.api [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354475, 'name': PowerOffVM_Task, 'duration_secs': 0.436989} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.623908] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 981.623908] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3334fd14-7d1d-4504-bc66-c3cb610c8638 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.644652] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828f5c1a-5d5f-444a-85a7-75eb8a311e3a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.721068] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354477, 'name': Destroy_Task} progress is 33%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.737282] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]527b4494-afbf-8e1c-0708-902eda809c7a, 'name': SearchDatastore_Task, 'duration_secs': 0.01144} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.738072] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fff22b83-149f-4d8d-b3e7-51ff2cb2e90a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.743726] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 981.743726] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52fe53ac-9577-1e4e-59ab-1fb80b046c56" [ 981.743726] env[61629]: _type = "Task" [ 981.743726] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.751741] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52fe53ac-9577-1e4e-59ab-1fb80b046c56, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.854057] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354474, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.904762} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.854184] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 22f71f92-ca9a-4b97-a652-3f34a0dabde2/22f71f92-ca9a-4b97-a652-3f34a0dabde2.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 981.854353] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 981.854611] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6061a0b3-32cf-47bd-89b0-ccb6346c3cef {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.860781] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 981.860781] env[61629]: value = "task-1354478" [ 981.860781] env[61629]: _type = "Task" [ 981.860781] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.868889] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354478, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.922752] env[61629]: DEBUG oslo_concurrency.lockutils [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Acquiring lock "refresh_cache-0daebf05-e42b-49c5-aa24-43304a1c3cc0" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.922968] env[61629]: DEBUG oslo_concurrency.lockutils [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Acquired lock "refresh_cache-0daebf05-e42b-49c5-aa24-43304a1c3cc0" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.923194] env[61629]: DEBUG nova.network.neutron [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 981.967024] env[61629]: DEBUG oslo_concurrency.lockutils [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.724s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.988249] env[61629]: INFO nova.scheduler.client.report [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Deleted allocations for instance 09890839-b1d9-4558-992d-b1a6f4c5f750 [ 982.158725] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Creating Snapshot of the VM instance {{(pid=61629) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 982.159082] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-841bb4cd-38ae-4fb2-b6b0-6b48a7e478a8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.168102] env[61629]: DEBUG oslo_vmware.api [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 982.168102] env[61629]: value = "task-1354479" [ 982.168102] env[61629]: _type = "Task" [ 982.168102] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.176098] env[61629]: DEBUG oslo_vmware.api [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354479, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.219652] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354477, 'name': Destroy_Task, 'duration_secs': 0.737503} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.219893] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Destroyed the VM [ 982.220257] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Deleting Snapshot of the VM instance {{(pid=61629) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 982.220544] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a2ef2388-d0d1-4def-9fd5-5c2cafd8102e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.228011] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 982.228011] env[61629]: value = "task-1354480" [ 982.228011] env[61629]: _type = "Task" [ 982.228011] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.235696] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354480, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.254121] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52fe53ac-9577-1e4e-59ab-1fb80b046c56, 'name': SearchDatastore_Task, 'duration_secs': 0.013925} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.254386] env[61629]: DEBUG oslo_concurrency.lockutils [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.254655] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 459c5f25-8fb1-4e43-8f7f-359a7ff697f2/7f036972-f3d8-47df-ae86-f8f2844bf80c-rescue.vmdk. {{(pid=61629) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 982.254917] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-938d048f-6eba-47d4-81d0-618735e304dc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.260958] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 982.260958] env[61629]: value = "task-1354481" [ 982.260958] env[61629]: _type = "Task" [ 982.260958] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.268208] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354481, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.370102] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354478, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.230263} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.370391] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 982.371165] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-656c7180-0189-4f40-8e0c-9014a74ae214 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.392517] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 22f71f92-ca9a-4b97-a652-3f34a0dabde2/22f71f92-ca9a-4b97-a652-3f34a0dabde2.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 982.392776] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-afb8d02b-94ef-46c2-9130-e7a3ef3e2b58 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.411304] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 982.411304] env[61629]: value = "task-1354482" [ 982.411304] env[61629]: _type = "Task" [ 982.411304] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.419208] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354482, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.456018] env[61629]: DEBUG nova.network.neutron [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 982.497927] env[61629]: DEBUG oslo_concurrency.lockutils [None req-2b9ee0f6-eb8a-454b-8efc-32cd21810620 tempest-AttachInterfacesTestJSON-460657919 tempest-AttachInterfacesTestJSON-460657919-project-member] Lock "09890839-b1d9-4558-992d-b1a6f4c5f750" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.078s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.545067] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "3994458a-195a-478c-b6d0-d8e36df989a3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.545366] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "3994458a-195a-478c-b6d0-d8e36df989a3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.545592] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "3994458a-195a-478c-b6d0-d8e36df989a3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.545801] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "3994458a-195a-478c-b6d0-d8e36df989a3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.545993] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "3994458a-195a-478c-b6d0-d8e36df989a3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.548817] env[61629]: INFO nova.compute.manager [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Terminating instance [ 982.550968] env[61629]: DEBUG nova.compute.manager [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 982.551178] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 982.552033] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ccb60d-eb78-4e99-b83b-facbf3e7a33f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.560060] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 982.560387] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a6f70eb9-777f-400c-bee4-06f298d2a7ae {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.568993] env[61629]: DEBUG oslo_vmware.api [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 982.568993] env[61629]: value = "task-1354483" [ 982.568993] env[61629]: _type = "Task" [ 982.568993] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.579450] env[61629]: DEBUG oslo_vmware.api [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354483, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.602261] env[61629]: DEBUG nova.network.neutron [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Updating instance_info_cache with network_info: [{"id": "abf6c35f-b11c-4f9e-b605-8f889ccf39ab", "address": "fa:16:3e:77:e5:b9", "network": {"id": "96f751d9-f3a8-454c-b17b-00f7a06732de", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-696914612-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b9dfc16a6d1479883f21028bbb9dd58", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabf6c35f-b1", "ovs_interfaceid": "abf6c35f-b11c-4f9e-b605-8f889ccf39ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.679520] env[61629]: DEBUG oslo_vmware.api [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354479, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.737341] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354480, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.770934] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354481, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.921299] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354482, 'name': ReconfigVM_Task, 'duration_secs': 0.324099} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.921607] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Reconfigured VM instance instance-0000005f to attach disk [datastore1] 22f71f92-ca9a-4b97-a652-3f34a0dabde2/22f71f92-ca9a-4b97-a652-3f34a0dabde2.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 982.922259] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fe985564-49c2-4f0a-b7b0-4622df5f328a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.928952] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 982.928952] env[61629]: value = "task-1354484" [ 982.928952] env[61629]: _type = "Task" [ 982.928952] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.938013] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354484, 'name': Rename_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.078757] env[61629]: DEBUG oslo_vmware.api [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354483, 'name': PowerOffVM_Task, 'duration_secs': 0.303484} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.079067] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 983.079301] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 983.079563] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e701d13-13cf-441b-9dbc-676bd19ac41e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.104947] env[61629]: DEBUG oslo_concurrency.lockutils [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Releasing lock "refresh_cache-0daebf05-e42b-49c5-aa24-43304a1c3cc0" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.105269] env[61629]: DEBUG nova.compute.manager [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Instance network_info: |[{"id": "abf6c35f-b11c-4f9e-b605-8f889ccf39ab", "address": "fa:16:3e:77:e5:b9", "network": {"id": "96f751d9-f3a8-454c-b17b-00f7a06732de", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-696914612-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b9dfc16a6d1479883f21028bbb9dd58", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabf6c35f-b1", "ovs_interfaceid": "abf6c35f-b11c-4f9e-b605-8f889ccf39ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 983.105718] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:e5:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3117b312-701b-4439-b197-96b6c5cdca89', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'abf6c35f-b11c-4f9e-b605-8f889ccf39ab', 
'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 983.113451] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Creating folder: Project (2b9dfc16a6d1479883f21028bbb9dd58). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 983.113744] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-728964e6-e7eb-415d-9c4f-0aa89f153b95 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.124952] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Created folder: Project (2b9dfc16a6d1479883f21028bbb9dd58) in parent group-v288443. [ 983.125223] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Creating folder: Instances. Parent ref: group-v288567. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 983.125434] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8356e069-df79-4883-9601-4f61e8a59641 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.134476] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Created folder: Instances in parent group-v288567. [ 983.134732] env[61629]: DEBUG oslo.service.loopingcall [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 983.134926] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 983.135146] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5fa7f066-e0f1-4fa5-9cab-047d70ffc432 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.153595] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 983.153595] env[61629]: value = "task-1354488" [ 983.153595] env[61629]: _type = "Task" [ 983.153595] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.161179] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354488, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.176509] env[61629]: DEBUG oslo_vmware.api [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354479, 'name': CreateSnapshot_Task, 'duration_secs': 0.966969} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.176788] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Created Snapshot of the VM instance {{(pid=61629) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 983.177527] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad0fced-48cc-4a36-aa86-d5d1b4f2d630 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.214021] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 983.214339] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 983.214587] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Deleting the datastore file [datastore1] 3994458a-195a-478c-b6d0-d8e36df989a3 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 983.214926] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d8a6bfb-b62f-400d-944e-caf9a48c0ded {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.222806] env[61629]: DEBUG oslo_vmware.api [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for the task: (returnval){ [ 983.222806] env[61629]: value = "task-1354489" [ 983.222806] env[61629]: _type = "Task" [ 983.222806] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.236323] env[61629]: DEBUG oslo_vmware.api [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354489, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.241736] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354480, 'name': RemoveSnapshot_Task, 'duration_secs': 0.81156} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.242018] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Deleted Snapshot of the VM instance {{(pid=61629) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 983.242307] env[61629]: DEBUG nova.compute.manager [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 983.243073] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-321ecef7-c5bf-476f-9ec7-f0c33335e277 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.271305] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354481, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.628698} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.271543] env[61629]: INFO nova.virt.vmwareapi.ds_util [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 459c5f25-8fb1-4e43-8f7f-359a7ff697f2/7f036972-f3d8-47df-ae86-f8f2844bf80c-rescue.vmdk. 
[ 983.272445] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278a20ce-0d8d-41db-acbd-478a431608e5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.301638] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] 459c5f25-8fb1-4e43-8f7f-359a7ff697f2/7f036972-f3d8-47df-ae86-f8f2844bf80c-rescue.vmdk or device None with type thin {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 983.302771] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd5d684a-1512-4b05-b130-e2923bf8c585 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.321101] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 983.321101] env[61629]: value = "task-1354490" [ 983.321101] env[61629]: _type = "Task" [ 983.321101] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.329813] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354490, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.360861] env[61629]: DEBUG nova.compute.manager [req-59367b7f-45f0-48cd-b6b8-80fb1d7c00ed req-4a367dee-fb3f-4eb7-a1a0-7c25d4b68492 service nova] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Received event network-changed-abf6c35f-b11c-4f9e-b605-8f889ccf39ab {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 983.360861] env[61629]: DEBUG nova.compute.manager [req-59367b7f-45f0-48cd-b6b8-80fb1d7c00ed req-4a367dee-fb3f-4eb7-a1a0-7c25d4b68492 service nova] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Refreshing instance network info cache due to event network-changed-abf6c35f-b11c-4f9e-b605-8f889ccf39ab. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 983.360861] env[61629]: DEBUG oslo_concurrency.lockutils [req-59367b7f-45f0-48cd-b6b8-80fb1d7c00ed req-4a367dee-fb3f-4eb7-a1a0-7c25d4b68492 service nova] Acquiring lock "refresh_cache-0daebf05-e42b-49c5-aa24-43304a1c3cc0" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.360861] env[61629]: DEBUG oslo_concurrency.lockutils [req-59367b7f-45f0-48cd-b6b8-80fb1d7c00ed req-4a367dee-fb3f-4eb7-a1a0-7c25d4b68492 service nova] Acquired lock "refresh_cache-0daebf05-e42b-49c5-aa24-43304a1c3cc0" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.360861] env[61629]: DEBUG nova.network.neutron [req-59367b7f-45f0-48cd-b6b8-80fb1d7c00ed req-4a367dee-fb3f-4eb7-a1a0-7c25d4b68492 service nova] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Refreshing network info cache for port abf6c35f-b11c-4f9e-b605-8f889ccf39ab {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 983.438363] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354484, 'name': Rename_Task, 'duration_secs': 0.146921} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.438697] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 983.438962] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f9ebf9f-dd8e-4852-bef3-04cb9cafa5d8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.444872] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 983.444872] env[61629]: value = "task-1354491" [ 983.444872] env[61629]: _type = "Task" [ 983.444872] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.453897] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354491, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.663317] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354488, 'name': CreateVM_Task, 'duration_secs': 0.383617} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.663666] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 983.664216] env[61629]: DEBUG oslo_concurrency.lockutils [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.664390] env[61629]: DEBUG oslo_concurrency.lockutils [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.664723] env[61629]: DEBUG oslo_concurrency.lockutils [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 983.664984] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea56a6d1-8de0-4c88-8956-ae05614ea6b0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.669600] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Waiting for the task: (returnval){ [ 983.669600] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]520f15b2-f0bb-c7e5-779c-e2133a845190" [ 983.669600] env[61629]: _type = "Task" [ 983.669600] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.677584] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]520f15b2-f0bb-c7e5-779c-e2133a845190, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.696787] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Creating linked-clone VM from snapshot {{(pid=61629) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 983.697135] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0baa7a47-6050-425b-9ced-1956e69a6a16 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.704405] env[61629]: DEBUG oslo_vmware.api [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 983.704405] env[61629]: value = "task-1354492" [ 983.704405] env[61629]: _type = "Task" [ 983.704405] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.712886] env[61629]: DEBUG oslo_vmware.api [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354492, 'name': CloneVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.730849] env[61629]: DEBUG oslo_vmware.api [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Task: {'id': task-1354489, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161861} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.731212] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 983.731332] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 983.731569] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 983.731719] env[61629]: INFO nova.compute.manager [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Took 1.18 seconds to destroy the instance on the hypervisor. 
[ 983.731998] env[61629]: DEBUG oslo.service.loopingcall [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 983.732626] env[61629]: DEBUG nova.compute.manager [-] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 983.732626] env[61629]: DEBUG nova.network.neutron [-] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 983.755801] env[61629]: INFO nova.compute.manager [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Shelve offloading [ 983.758135] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 983.758436] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98ca5228-3e7f-4047-81ea-6620de13d39f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.764941] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 983.764941] env[61629]: value = "task-1354493" [ 983.764941] env[61629]: _type = "Task" [ 983.764941] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.774510] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] VM already powered off {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 983.774510] env[61629]: DEBUG nova.compute.manager [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 983.775429] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee7e6e60-ba29-4b70-bf3f-d50dd36a0b39 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.783719] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "refresh_cache-a08e5762-5307-4dd8-a025-a1cdfd43025e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.783936] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired lock "refresh_cache-a08e5762-5307-4dd8-a025-a1cdfd43025e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.784164] env[61629]: DEBUG nova.network.neutron [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 983.831555] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354490, 'name': ReconfigVM_Task, 'duration_secs': 0.379344} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.831862] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Reconfigured VM instance instance-00000058 to attach disk [datastore2] 459c5f25-8fb1-4e43-8f7f-359a7ff697f2/7f036972-f3d8-47df-ae86-f8f2844bf80c-rescue.vmdk or device None with type thin {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 983.832780] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c2f934-fff6-4d78-a675-93a65df57522 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.868316] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd89e831-319f-4431-9e9f-da9ce29e4d5d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.883553] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 983.883553] env[61629]: value = "task-1354494" [ 983.883553] env[61629]: _type = "Task" [ 983.883553] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.896261] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354494, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.954703] env[61629]: DEBUG oslo_vmware.api [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354491, 'name': PowerOnVM_Task, 'duration_secs': 0.461008} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.954988] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 983.955220] env[61629]: INFO nova.compute.manager [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Took 9.39 seconds to spawn the instance on the hypervisor. 
[ 983.955408] env[61629]: DEBUG nova.compute.manager [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 983.956232] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f008d9-6c44-48b5-8217-a73122993bdc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.180047] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]520f15b2-f0bb-c7e5-779c-e2133a845190, 'name': SearchDatastore_Task, 'duration_secs': 0.009214} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.180374] env[61629]: DEBUG oslo_concurrency.lockutils [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.180618] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 984.180884] env[61629]: DEBUG oslo_concurrency.lockutils [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.181048] env[61629]: DEBUG oslo_concurrency.lockutils [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.181238] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 984.181499] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dbf1236a-fc9e-4a5c-bd98-7d9324ddaa56 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.195072] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 
tempest-ServersTestManualDisk-1423538324-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 984.195265] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 984.197412] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bd18457-95bc-43f1-b48f-c7316aa5eb6c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.201683] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Waiting for the task: (returnval){ [ 984.201683] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5254b63b-e43d-862f-a07e-2ffccc2f7c3b" [ 984.201683] env[61629]: _type = "Task" [ 984.201683] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.215220] env[61629]: DEBUG oslo_vmware.api [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354492, 'name': CloneVM_Task} progress is 94%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.215464] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5254b63b-e43d-862f-a07e-2ffccc2f7c3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.394287] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354494, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.427108] env[61629]: DEBUG nova.network.neutron [req-59367b7f-45f0-48cd-b6b8-80fb1d7c00ed req-4a367dee-fb3f-4eb7-a1a0-7c25d4b68492 service nova] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Updated VIF entry in instance network info cache for port abf6c35f-b11c-4f9e-b605-8f889ccf39ab. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 984.427528] env[61629]: DEBUG nova.network.neutron [req-59367b7f-45f0-48cd-b6b8-80fb1d7c00ed req-4a367dee-fb3f-4eb7-a1a0-7c25d4b68492 service nova] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Updating instance_info_cache with network_info: [{"id": "abf6c35f-b11c-4f9e-b605-8f889ccf39ab", "address": "fa:16:3e:77:e5:b9", "network": {"id": "96f751d9-f3a8-454c-b17b-00f7a06732de", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-696914612-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b9dfc16a6d1479883f21028bbb9dd58", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabf6c35f-b1", "ovs_interfaceid": "abf6c35f-b11c-4f9e-b605-8f889ccf39ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.477916] env[61629]: INFO nova.compute.manager [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Took 20.90 seconds to build instance. [ 984.720137] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5254b63b-e43d-862f-a07e-2ffccc2f7c3b, 'name': SearchDatastore_Task, 'duration_secs': 0.041452} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.725505] env[61629]: DEBUG oslo_vmware.api [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354492, 'name': CloneVM_Task} progress is 94%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.725817] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83a6c3ae-6992-408b-953e-402f0ae3856b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.731977] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Waiting for the task: (returnval){ [ 984.731977] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]522a4f1c-c40a-7366-93d6-622192c84ad3" [ 984.731977] env[61629]: _type = "Task" [ 984.731977] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.739929] env[61629]: DEBUG nova.network.neutron [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Updating instance_info_cache with network_info: [{"id": "ff773335-988b-491d-bfab-ce5568c05be8", "address": "fa:16:3e:c5:7d:f6", "network": {"id": "03610486-2741-491e-a62d-a51579315e5a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1394073503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38efdd2cc07f45a49fb06d590aafb96b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff773335-98", "ovs_interfaceid": "ff773335-988b-491d-bfab-ce5568c05be8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.751489] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]522a4f1c-c40a-7366-93d6-622192c84ad3, 'name': SearchDatastore_Task, 'duration_secs': 0.010008} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.752859] env[61629]: DEBUG oslo_concurrency.lockutils [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.753458] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 0daebf05-e42b-49c5-aa24-43304a1c3cc0/0daebf05-e42b-49c5-aa24-43304a1c3cc0.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 984.754521] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7dd74356-5fc8-446e-991c-b5dbe2187fec {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.765738] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Waiting for the task: (returnval){ [ 984.765738] env[61629]: value = "task-1354495" [ 984.765738] env[61629]: _type = "Task" [ 984.765738] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.785039] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Task: {'id': task-1354495, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.788035] env[61629]: DEBUG nova.network.neutron [-] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.894482] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354494, 'name': ReconfigVM_Task, 'duration_secs': 0.65399} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.894780] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 984.895068] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc657387-95b0-4081-8fd8-6b9bce371ac8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.901655] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 984.901655] env[61629]: value = "task-1354496" [ 984.901655] env[61629]: _type = "Task" [ 984.901655] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.910673] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354496, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.930543] env[61629]: DEBUG oslo_concurrency.lockutils [req-59367b7f-45f0-48cd-b6b8-80fb1d7c00ed req-4a367dee-fb3f-4eb7-a1a0-7c25d4b68492 service nova] Releasing lock "refresh_cache-0daebf05-e42b-49c5-aa24-43304a1c3cc0" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.980550] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f461209b-745f-4148-a838-ab67504e6b7e tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "22f71f92-ca9a-4b97-a652-3f34a0dabde2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.409s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.221575] env[61629]: DEBUG oslo_vmware.api [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354492, 'name': CloneVM_Task, 'duration_secs': 1.509093} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.221940] env[61629]: INFO nova.virt.vmwareapi.vmops [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Created linked-clone VM from snapshot [ 985.222779] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1678fe9-252e-4975-9fdd-ddfe85752c3e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.231920] env[61629]: DEBUG nova.virt.vmwareapi.images [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Uploading image d1404c99-66c8-439f-b330-d6263aa8a1a3 {{(pid=61629) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 985.245039] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Releasing lock "refresh_cache-a08e5762-5307-4dd8-a025-a1cdfd43025e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.262630] env[61629]: DEBUG oslo_vmware.rw_handles [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 985.262630] env[61629]: value = "vm-288570" [ 985.262630] env[61629]: _type = "VirtualMachine" [ 985.262630] env[61629]: }. {{(pid=61629) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 985.263376] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e9185e32-12f5-438b-86bd-066c3247e3f7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.273064] env[61629]: DEBUG oslo_vmware.rw_handles [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lease: (returnval){ [ 985.273064] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]526b4c00-1e51-ea39-3e77-956dc560c9ea" [ 985.273064] env[61629]: _type = "HttpNfcLease" [ 985.273064] env[61629]: } obtained for exporting VM: (result){ [ 985.273064] env[61629]: value = "vm-288570" [ 985.273064] env[61629]: _type = "VirtualMachine" [ 985.273064] env[61629]: }. {{(pid=61629) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 985.273534] env[61629]: DEBUG oslo_vmware.api [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the lease: (returnval){ [ 985.273534] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]526b4c00-1e51-ea39-3e77-956dc560c9ea" [ 985.273534] env[61629]: _type = "HttpNfcLease" [ 985.273534] env[61629]: } to be ready. 
{{(pid=61629) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 985.281667] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Task: {'id': task-1354495, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.286898] env[61629]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 985.286898] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]526b4c00-1e51-ea39-3e77-956dc560c9ea" [ 985.286898] env[61629]: _type = "HttpNfcLease" [ 985.286898] env[61629]: } is initializing. {{(pid=61629) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 985.290591] env[61629]: INFO nova.compute.manager [-] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Took 1.56 seconds to deallocate network for instance. [ 985.391232] env[61629]: DEBUG nova.compute.manager [req-4532211d-2ed6-4a82-a10c-5ca874df992f req-c01ea396-b1c8-4619-ba4b-80007ea9eff9 service nova] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Received event network-vif-deleted-d3b1da99-eb4d-4c80-adfc-2aa87a264e1e {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 985.414566] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354496, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.649980] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "d95162d0-cc5e-4516-b76e-8d7736be1032" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.649980] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "d95162d0-cc5e-4516-b76e-8d7736be1032" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.763183] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 985.764188] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2940ea84-b1e1-4d65-bff4-01f265a6c6c0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.785717] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Unregistering 
the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 985.786088] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Task: {'id': task-1354495, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.768569} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.786571] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-35a638ed-c99f-4826-8dfb-daa681f8a984 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.788093] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 0daebf05-e42b-49c5-aa24-43304a1c3cc0/0daebf05-e42b-49c5-aa24-43304a1c3cc0.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 985.788320] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 985.790512] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2fb46cfa-3be3-42d9-99a7-8d92b0b133ee {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.792098] env[61629]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 985.792098] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]526b4c00-1e51-ea39-3e77-956dc560c9ea" [ 985.792098] env[61629]: _type = "HttpNfcLease" [ 985.792098] env[61629]: } is ready. {{(pid=61629) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 985.792772] env[61629]: DEBUG oslo_vmware.rw_handles [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 985.792772] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]526b4c00-1e51-ea39-3e77-956dc560c9ea" [ 985.792772] env[61629]: _type = "HttpNfcLease" [ 985.792772] env[61629]: }. {{(pid=61629) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 985.793814] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adc50b88-40dd-467d-9142-90d115ff45a0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.798641] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Waiting for the task: (returnval){ [ 985.798641] env[61629]: value = "task-1354499" [ 985.798641] env[61629]: _type = "Task" [ 985.798641] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.802320] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.802628] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.802858] env[61629]: DEBUG nova.objects.instance [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lazy-loading 'resources' on Instance uuid 3994458a-195a-478c-b6d0-d8e36df989a3 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 985.803921] env[61629]: DEBUG oslo_vmware.rw_handles [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528553c7-3e56-9ae1-75e5-de7c586c833f/disk-0.vmdk from lease info. {{(pid=61629) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 985.804928] env[61629]: DEBUG oslo_vmware.rw_handles [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528553c7-3e56-9ae1-75e5-de7c586c833f/disk-0.vmdk for reading. {{(pid=61629) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 985.814998] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Task: {'id': task-1354499, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.872042] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 985.872271] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 985.872458] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Deleting the datastore file [datastore2] a08e5762-5307-4dd8-a025-a1cdfd43025e {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 985.873769] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6df73a92-acd7-48e7-8ca2-bbe787683918 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.880759] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 985.880759] env[61629]: value = "task-1354500" [ 985.880759] env[61629]: _type = "Task" [ 985.880759] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.888853] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354500, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.905787] env[61629]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b95562f4-1973-4b5e-a0c2-3c001f81639b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.918217] env[61629]: DEBUG oslo_vmware.api [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354496, 'name': PowerOnVM_Task, 'duration_secs': 0.571909} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.920078] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 985.922271] env[61629]: DEBUG nova.compute.manager [None req-82ea4322-5496-49a9-98ab-06066f4355d5 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 985.923137] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef83e8a-817e-49d5-8690-a58a0590b877 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.151600] env[61629]: DEBUG nova.compute.manager [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 986.313407] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Task: {'id': task-1354499, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.099346} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.314033] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 986.314536] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46469fc2-a4a4-4c76-ab8c-8d98ce15c1f5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.339338] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 0daebf05-e42b-49c5-aa24-43304a1c3cc0/0daebf05-e42b-49c5-aa24-43304a1c3cc0.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 986.343052] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-199802a0-24d3-4a70-872d-0de9ed5d8f8d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.366876] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Waiting for the task: (returnval){ [ 986.366876] env[61629]: value = "task-1354501" [ 
986.366876] env[61629]: _type = "Task" [ 986.366876] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.375710] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Task: {'id': task-1354501, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.394415] env[61629]: DEBUG oslo_vmware.api [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354500, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15378} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.397018] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 986.397366] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 986.397641] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 986.421159] env[61629]: INFO nova.scheduler.client.report [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Deleted allocations for instance a08e5762-5307-4dd8-a025-a1cdfd43025e [ 986.516732] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30920d6f-f2eb-4e89-aa0e-993796be60fe {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.525700] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff0d1e37-e03d-4d12-8c7b-c499612191c3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.559579] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4ff649-be33-47c3-8106-dea4727f6973 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.567530] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf23c329-1cc8-4074-92bd-49409350204e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.582038] env[61629]: DEBUG nova.compute.provider_tree [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 
tempest-ServerDiskConfigTestJSON-1122083488-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.672260] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.865010] env[61629]: INFO nova.compute.manager [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Unrescuing [ 986.865312] env[61629]: DEBUG oslo_concurrency.lockutils [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "refresh_cache-459c5f25-8fb1-4e43-8f7f-359a7ff697f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.865472] env[61629]: DEBUG oslo_concurrency.lockutils [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquired lock "refresh_cache-459c5f25-8fb1-4e43-8f7f-359a7ff697f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.865644] env[61629]: DEBUG nova.network.neutron [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 986.877868] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Task: {'id': task-1354501, 'name': ReconfigVM_Task, 'duration_secs': 0.290084} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.879788] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 0daebf05-e42b-49c5-aa24-43304a1c3cc0/0daebf05-e42b-49c5-aa24-43304a1c3cc0.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 986.880910] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-81fd3098-9cf2-4f00-9800-3b4f5109d8b0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.889408] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Waiting for the task: (returnval){ [ 986.889408] env[61629]: value = "task-1354502" [ 986.889408] env[61629]: _type = "Task" [ 986.889408] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.902965] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Task: {'id': task-1354502, 'name': Rename_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.929864] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.084931] env[61629]: DEBUG nova.scheduler.client.report [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 987.400272] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Task: {'id': task-1354502, 'name': Rename_Task, 'duration_secs': 0.148744} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.400613] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 987.400874] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-77c83e73-9043-44de-ab26-c5c356605378 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.407078] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Waiting for the task: (returnval){ [ 987.407078] env[61629]: value = "task-1354503" [ 987.407078] env[61629]: _type = "Task" [ 987.407078] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.417153] env[61629]: DEBUG oslo_concurrency.lockutils [None req-326e8236-0b7a-4a71-ad65-859b18b1d577 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "a08e5762-5307-4dd8-a025-a1cdfd43025e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.417526] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Task: {'id': task-1354503, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.430858] env[61629]: DEBUG nova.compute.manager [req-5c39019b-a56c-4127-a8dc-3bd7d65d1156 req-07c93c7f-de3b-4697-8f7d-258c6e8768b1 service nova] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Received event network-vif-unplugged-ff773335-988b-491d-bfab-ce5568c05be8 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 987.431154] env[61629]: DEBUG oslo_concurrency.lockutils [req-5c39019b-a56c-4127-a8dc-3bd7d65d1156 req-07c93c7f-de3b-4697-8f7d-258c6e8768b1 service nova] Acquiring lock "a08e5762-5307-4dd8-a025-a1cdfd43025e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.431381] env[61629]: DEBUG oslo_concurrency.lockutils [req-5c39019b-a56c-4127-a8dc-3bd7d65d1156 req-07c93c7f-de3b-4697-8f7d-258c6e8768b1 service nova] Lock "a08e5762-5307-4dd8-a025-a1cdfd43025e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.431563] env[61629]: DEBUG oslo_concurrency.lockutils [req-5c39019b-a56c-4127-a8dc-3bd7d65d1156 req-07c93c7f-de3b-4697-8f7d-258c6e8768b1 service nova] Lock "a08e5762-5307-4dd8-a025-a1cdfd43025e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.431762] env[61629]: DEBUG nova.compute.manager [req-5c39019b-a56c-4127-a8dc-3bd7d65d1156 req-07c93c7f-de3b-4697-8f7d-258c6e8768b1 service nova] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] No waiting events found dispatching network-vif-unplugged-ff773335-988b-491d-bfab-ce5568c05be8 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 987.431935] env[61629]: DEBUG nova.compute.manager [req-5c39019b-a56c-4127-a8dc-3bd7d65d1156 req-07c93c7f-de3b-4697-8f7d-258c6e8768b1 service nova] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Received event network-vif-unplugged-ff773335-988b-491d-bfab-ce5568c05be8 for instance with task_state deleting. {{(pid=61629) _process_instance_event /opt/stack/nova/nova/compute/manager.py:10909}} [ 987.432373] env[61629]: DEBUG nova.compute.manager [req-5c39019b-a56c-4127-a8dc-3bd7d65d1156 req-07c93c7f-de3b-4697-8f7d-258c6e8768b1 service nova] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Received event network-changed-ff773335-988b-491d-bfab-ce5568c05be8 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 987.432667] env[61629]: DEBUG nova.compute.manager [req-5c39019b-a56c-4127-a8dc-3bd7d65d1156 req-07c93c7f-de3b-4697-8f7d-258c6e8768b1 service nova] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Refreshing instance network info cache due to event network-changed-ff773335-988b-491d-bfab-ce5568c05be8. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 987.432939] env[61629]: DEBUG oslo_concurrency.lockutils [req-5c39019b-a56c-4127-a8dc-3bd7d65d1156 req-07c93c7f-de3b-4697-8f7d-258c6e8768b1 service nova] Acquiring lock "refresh_cache-a08e5762-5307-4dd8-a025-a1cdfd43025e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.435843] env[61629]: DEBUG oslo_concurrency.lockutils [req-5c39019b-a56c-4127-a8dc-3bd7d65d1156 req-07c93c7f-de3b-4697-8f7d-258c6e8768b1 service nova] Acquired lock "refresh_cache-a08e5762-5307-4dd8-a025-a1cdfd43025e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.436254] env[61629]: DEBUG nova.network.neutron [req-5c39019b-a56c-4127-a8dc-3bd7d65d1156 req-07c93c7f-de3b-4697-8f7d-258c6e8768b1 service nova] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Refreshing network info cache for port ff773335-988b-491d-bfab-ce5568c05be8 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 987.591934] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.789s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.595407] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.923s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.597756] env[61629]: INFO nova.compute.claims [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 987.628992] env[61629]: INFO nova.scheduler.client.report [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Deleted allocations for instance 3994458a-195a-478c-b6d0-d8e36df989a3 [ 987.850884] env[61629]: DEBUG nova.network.neutron [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Updating instance_info_cache with network_info: [{"id": "c827ba81-d74a-4ff3-bfc2-81b5e09c683c", "address": "fa:16:3e:6d:96:2d", "network": {"id": "a1fb78c4-7c5c-4692-86e0-3111b87b44c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1355821875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87909880104e4519b42cb204f366af3f", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc827ba81-d7", "ovs_interfaceid": "c827ba81-d74a-4ff3-bfc2-81b5e09c683c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.918240] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Task: {'id': task-1354503, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.143383] env[61629]: DEBUG oslo_concurrency.lockutils [None req-da9599a7-3e99-4dac-9cd6-f1502349aa3c tempest-ServerDiskConfigTestJSON-1122083488 tempest-ServerDiskConfigTestJSON-1122083488-project-member] Lock "3994458a-195a-478c-b6d0-d8e36df989a3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.598s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.305110] env[61629]: DEBUG nova.network.neutron [req-5c39019b-a56c-4127-a8dc-3bd7d65d1156 req-07c93c7f-de3b-4697-8f7d-258c6e8768b1 service nova] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Updated VIF entry in instance network info cache for port ff773335-988b-491d-bfab-ce5568c05be8. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 988.305554] env[61629]: DEBUG nova.network.neutron [req-5c39019b-a56c-4127-a8dc-3bd7d65d1156 req-07c93c7f-de3b-4697-8f7d-258c6e8768b1 service nova] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Updating instance_info_cache with network_info: [{"id": "ff773335-988b-491d-bfab-ce5568c05be8", "address": "fa:16:3e:c5:7d:f6", "network": {"id": "03610486-2741-491e-a62d-a51579315e5a", "bridge": null, "label": "tempest-DeleteServersTestJSON-1394073503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38efdd2cc07f45a49fb06d590aafb96b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapff773335-98", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.354010] env[61629]: DEBUG oslo_concurrency.lockutils [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Releasing lock "refresh_cache-459c5f25-8fb1-4e43-8f7f-359a7ff697f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.354729] 
env[61629]: DEBUG nova.objects.instance [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lazy-loading 'flavor' on Instance uuid 459c5f25-8fb1-4e43-8f7f-359a7ff697f2 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 988.419189] env[61629]: DEBUG oslo_vmware.api [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Task: {'id': task-1354503, 'name': PowerOnVM_Task, 'duration_secs': 0.62583} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.419514] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 988.419755] env[61629]: INFO nova.compute.manager [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Took 7.95 seconds to spawn the instance on the hypervisor. [ 988.419992] env[61629]: DEBUG nova.compute.manager [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 988.420856] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9076cf0-5b82-45f2-98b1-e524852fe43f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.767201] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ddcbc33-fbcd-4299-b9a4-9d36e8bd3451 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.779088] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48384e38-14ab-47b9-9b28-63e42564688d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.823082] env[61629]: DEBUG oslo_concurrency.lockutils [req-5c39019b-a56c-4127-a8dc-3bd7d65d1156 req-07c93c7f-de3b-4697-8f7d-258c6e8768b1 service nova] Releasing lock "refresh_cache-a08e5762-5307-4dd8-a025-a1cdfd43025e" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.823802] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbeffca1-f492-4c9c-8cf9-f88a0af3ce46 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.832687] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a39b18-80d2-400b-ad84-4855410ef37b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.847116] env[61629]: DEBUG nova.compute.provider_tree [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 
tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 988.862937] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1080f5-c86c-4f61-b184-c6cf45205b28 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.890833] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 988.891102] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ba5b67f-0836-4f08-9e27-f57d49c002ba {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.898393] env[61629]: DEBUG oslo_vmware.api [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 988.898393] env[61629]: value = "task-1354504" [ 988.898393] env[61629]: _type = "Task" [ 988.898393] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.909883] env[61629]: DEBUG oslo_vmware.api [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354504, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.943582] env[61629]: INFO nova.compute.manager [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Took 13.55 seconds to build instance. [ 989.353028] env[61629]: DEBUG nova.scheduler.client.report [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 989.418102] env[61629]: DEBUG oslo_vmware.api [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354504, 'name': PowerOffVM_Task, 'duration_secs': 0.313863} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.418475] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 989.425804] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Reconfiguring VM instance instance-00000058 to detach disk 2002 {{(pid=61629) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 989.426998] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31b30cbd-58ba-48c8-a70b-6cf6ce03bdf3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.447184] env[61629]: DEBUG oslo_concurrency.lockutils [None req-580d98bb-fd92-4954-b559-35ef43f6ca7a tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Lock "0daebf05-e42b-49c5-aa24-43304a1c3cc0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.071s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.453243] env[61629]: DEBUG oslo_vmware.api [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 989.453243] env[61629]: value = "task-1354505" [ 989.453243] env[61629]: _type = "Task" [ 989.453243] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.463186] env[61629]: DEBUG oslo_vmware.api [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354505, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.717667] env[61629]: DEBUG nova.compute.manager [req-bf6e3cd6-e995-481c-aa47-5ea07ad0db59 req-bf7cb990-1188-4fe5-a0e9-4207de38b6ae service nova] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Received event network-changed-abf6c35f-b11c-4f9e-b605-8f889ccf39ab {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 989.718281] env[61629]: DEBUG nova.compute.manager [req-bf6e3cd6-e995-481c-aa47-5ea07ad0db59 req-bf7cb990-1188-4fe5-a0e9-4207de38b6ae service nova] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Refreshing instance network info cache due to event network-changed-abf6c35f-b11c-4f9e-b605-8f889ccf39ab. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 989.718574] env[61629]: DEBUG oslo_concurrency.lockutils [req-bf6e3cd6-e995-481c-aa47-5ea07ad0db59 req-bf7cb990-1188-4fe5-a0e9-4207de38b6ae service nova] Acquiring lock "refresh_cache-0daebf05-e42b-49c5-aa24-43304a1c3cc0" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 989.718851] env[61629]: DEBUG oslo_concurrency.lockutils [req-bf6e3cd6-e995-481c-aa47-5ea07ad0db59 req-bf7cb990-1188-4fe5-a0e9-4207de38b6ae service nova] Acquired lock "refresh_cache-0daebf05-e42b-49c5-aa24-43304a1c3cc0" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.719511] env[61629]: DEBUG nova.network.neutron [req-bf6e3cd6-e995-481c-aa47-5ea07ad0db59 req-bf7cb990-1188-4fe5-a0e9-4207de38b6ae service nova] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Refreshing network info cache for port abf6c35f-b11c-4f9e-b605-8f889ccf39ab {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 989.857608] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.262s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.858365] env[61629]: DEBUG nova.compute.manager [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 989.861616] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.932s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.865116] env[61629]: DEBUG nova.objects.instance [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lazy-loading 'resources' on Instance uuid a08e5762-5307-4dd8-a025-a1cdfd43025e {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 989.965314] env[61629]: DEBUG oslo_vmware.api [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354505, 'name': ReconfigVM_Task, 'duration_secs': 0.450883} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.966063] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Reconfigured VM instance instance-00000058 to detach disk 2002 {{(pid=61629) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 989.966498] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 989.966900] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc7202bc-bc60-4601-b9f9-1e6bef3e6138 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.975129] env[61629]: DEBUG oslo_vmware.api [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 989.975129] env[61629]: value = "task-1354506" [ 989.975129] env[61629]: _type = "Task" [ 989.975129] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.986796] env[61629]: DEBUG oslo_vmware.api [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354506, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.368280] env[61629]: DEBUG nova.compute.utils [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 990.373553] env[61629]: DEBUG nova.objects.instance [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lazy-loading 'numa_topology' on Instance uuid a08e5762-5307-4dd8-a025-a1cdfd43025e {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 990.375393] env[61629]: DEBUG nova.compute.manager [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 990.375532] env[61629]: DEBUG nova.network.neutron [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 990.485585] env[61629]: DEBUG nova.policy [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c62f9a7c8b5f4ef985880339407b46a1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0578ce75c37942d4ba6c8b862ceb7d92', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 990.506561] env[61629]: DEBUG oslo_vmware.api [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354506, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.674769] env[61629]: DEBUG nova.network.neutron [req-bf6e3cd6-e995-481c-aa47-5ea07ad0db59 req-bf7cb990-1188-4fe5-a0e9-4207de38b6ae service nova] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Updated VIF entry in instance network info cache for port abf6c35f-b11c-4f9e-b605-8f889ccf39ab. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 990.675195] env[61629]: DEBUG nova.network.neutron [req-bf6e3cd6-e995-481c-aa47-5ea07ad0db59 req-bf7cb990-1188-4fe5-a0e9-4207de38b6ae service nova] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Updating instance_info_cache with network_info: [{"id": "abf6c35f-b11c-4f9e-b605-8f889ccf39ab", "address": "fa:16:3e:77:e5:b9", "network": {"id": "96f751d9-f3a8-454c-b17b-00f7a06732de", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-696914612-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b9dfc16a6d1479883f21028bbb9dd58", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabf6c35f-b1", "ovs_interfaceid": "abf6c35f-b11c-4f9e-b605-8f889ccf39ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.877714] env[61629]: DEBUG nova.compute.manager [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 990.879840] env[61629]: DEBUG nova.objects.base [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=61629) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 990.975102] env[61629]: DEBUG nova.network.neutron [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Successfully created port: bf1ec645-7d1d-4e20-8075-a16aa9ea0a48 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 990.994985] env[61629]: DEBUG oslo_vmware.api [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354506, 'name': PowerOnVM_Task, 'duration_secs': 0.542312} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.995360] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 990.996450] env[61629]: DEBUG nova.compute.manager [None req-41341476-9e7d-466f-afef-1ba945132218 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 990.996581] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a63cb8-30f5-4134-a12e-cf910ee61299 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.052332] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d11e70-02d7-4615-a196-fcf6bb77bb99 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.061453] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a691248-09d5-460f-9221-bf30e9f1607e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.097939] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92e9f81-0451-4d6a-abf2-8255401c86d6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.106758] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13510e11-90f7-4c2a-8b72-31009b166178 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.121340] env[61629]: DEBUG nova.compute.provider_tree [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 991.180887] env[61629]: DEBUG oslo_concurrency.lockutils [req-bf6e3cd6-e995-481c-aa47-5ea07ad0db59 req-bf7cb990-1188-4fe5-a0e9-4207de38b6ae service nova] Releasing lock "refresh_cache-0daebf05-e42b-49c5-aa24-43304a1c3cc0" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 991.626842] env[61629]: DEBUG nova.scheduler.client.report [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 991.894385] env[61629]: DEBUG nova.compute.manager [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Start spawning the instance on the hypervisor. {{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 991.933494] env[61629]: DEBUG nova.virt.hardware [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 991.934023] env[61629]: DEBUG nova.virt.hardware [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 991.934426] env[61629]: DEBUG nova.virt.hardware [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 991.934780] env[61629]: DEBUG nova.virt.hardware [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 991.935081] env[61629]: DEBUG nova.virt.hardware [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 991.935364] env[61629]: DEBUG nova.virt.hardware [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 991.935962] env[61629]: DEBUG nova.virt.hardware [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 991.936300] env[61629]: DEBUG nova.virt.hardware [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 
tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 991.936627] env[61629]: DEBUG nova.virt.hardware [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 991.936937] env[61629]: DEBUG nova.virt.hardware [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 991.937313] env[61629]: DEBUG nova.virt.hardware [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 991.938361] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Acquiring lock "9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.938747] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Lock "9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.942019] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquiring lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.942019] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.942019] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquiring lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.942019] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.942019] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.943927] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af66d87-cce9-4f9d-89b4-a9ddfeb82d8b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.947697] env[61629]: INFO nova.compute.manager [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Terminating instance [ 991.951481] env[61629]: DEBUG nova.compute.manager [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 991.952481] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 991.954302] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31cbf6a8-af97-4a7d-b664-8c66383e8b61 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.969087] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58001ec-e634-4717-893c-c4a532012b37 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.975385] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 991.975892] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-804c45c9-89f9-4c4d-b3e7-0ec103000079 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.989276] env[61629]: DEBUG oslo_vmware.api [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 991.989276] env[61629]: value = "task-1354507" [ 991.989276] env[61629]: _type = "Task" [ 991.989276] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.999557] env[61629]: DEBUG oslo_vmware.api [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354507, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.127765] env[61629]: DEBUG nova.compute.manager [req-4928bdb1-ba05-4e87-8b38-9e64a4c7bae5 req-5a9ae10e-5c32-4797-a64d-7fd020a34e72 service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Received event network-changed-c827ba81-d74a-4ff3-bfc2-81b5e09c683c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 992.127970] env[61629]: DEBUG nova.compute.manager [req-4928bdb1-ba05-4e87-8b38-9e64a4c7bae5 req-5a9ae10e-5c32-4797-a64d-7fd020a34e72 service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Refreshing instance network info cache due to event network-changed-c827ba81-d74a-4ff3-bfc2-81b5e09c683c. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 992.128206] env[61629]: DEBUG oslo_concurrency.lockutils [req-4928bdb1-ba05-4e87-8b38-9e64a4c7bae5 req-5a9ae10e-5c32-4797-a64d-7fd020a34e72 service nova] Acquiring lock "refresh_cache-459c5f25-8fb1-4e43-8f7f-359a7ff697f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 992.128353] env[61629]: DEBUG oslo_concurrency.lockutils [req-4928bdb1-ba05-4e87-8b38-9e64a4c7bae5 req-5a9ae10e-5c32-4797-a64d-7fd020a34e72 service nova] Acquired lock "refresh_cache-459c5f25-8fb1-4e43-8f7f-359a7ff697f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.128532] env[61629]: DEBUG nova.network.neutron [req-4928bdb1-ba05-4e87-8b38-9e64a4c7bae5 req-5a9ae10e-5c32-4797-a64d-7fd020a34e72 service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Refreshing network info cache for port c827ba81-d74a-4ff3-bfc2-81b5e09c683c {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 992.133031] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.271s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.451928] env[61629]: DEBUG nova.compute.manager [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 992.501682] env[61629]: DEBUG oslo_vmware.api [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354507, 'name': PowerOffVM_Task, 'duration_secs': 0.320993} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.501976] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 992.502167] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 992.502431] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-616aeb8e-d5ee-42dd-87da-9c647a3e111c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.588064] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 992.588064] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 992.588064] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Deleting the datastore file [datastore1] fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 992.588064] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b31f527-0d02-41da-9f14-71ace74f6d6c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.597401] env[61629]: DEBUG oslo_vmware.api [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for the task: (returnval){ [ 992.597401] env[61629]: value = "task-1354509" [ 992.597401] env[61629]: _type = "Task" [ 992.597401] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.606397] env[61629]: DEBUG oslo_vmware.api [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354509, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.641210] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9a4780f8-3f81-47c7-b00f-428a46a8c1b0 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "a08e5762-5307-4dd8-a025-a1cdfd43025e" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 25.394s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.642196] env[61629]: DEBUG oslo_concurrency.lockutils [None req-326e8236-0b7a-4a71-ad65-859b18b1d577 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "a08e5762-5307-4dd8-a025-a1cdfd43025e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 5.226s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.642425] env[61629]: DEBUG oslo_concurrency.lockutils [None req-326e8236-0b7a-4a71-ad65-859b18b1d577 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "a08e5762-5307-4dd8-a025-a1cdfd43025e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.642633] env[61629]: DEBUG oslo_concurrency.lockutils [None req-326e8236-0b7a-4a71-ad65-859b18b1d577 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "a08e5762-5307-4dd8-a025-a1cdfd43025e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.642806] env[61629]: DEBUG oslo_concurrency.lockutils [None req-326e8236-0b7a-4a71-ad65-859b18b1d577 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "a08e5762-5307-4dd8-a025-a1cdfd43025e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.644773] env[61629]: INFO nova.compute.manager [None req-326e8236-0b7a-4a71-ad65-859b18b1d577 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Terminating instance [ 992.646968] env[61629]: DEBUG nova.compute.manager [None req-326e8236-0b7a-4a71-ad65-859b18b1d577 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 992.646968] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-326e8236-0b7a-4a71-ad65-859b18b1d577 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 992.647308] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-35fe2c75-1b4b-4d8e-abf2-94e5a5b2be5f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.661064] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ef780f-e8a6-419d-b21a-24c60439c263 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.698266] env[61629]: WARNING nova.virt.vmwareapi.vmops [None req-326e8236-0b7a-4a71-ad65-859b18b1d577 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a08e5762-5307-4dd8-a025-a1cdfd43025e could not be found. [ 992.698512] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-326e8236-0b7a-4a71-ad65-859b18b1d577 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 992.699151] env[61629]: INFO nova.compute.manager [None req-326e8236-0b7a-4a71-ad65-859b18b1d577 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Took 0.05 seconds to destroy the instance on the hypervisor. [ 992.699151] env[61629]: DEBUG oslo.service.loopingcall [None req-326e8236-0b7a-4a71-ad65-859b18b1d577 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 992.699311] env[61629]: DEBUG nova.compute.manager [-] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 992.699404] env[61629]: DEBUG nova.network.neutron [-] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 992.934058] env[61629]: DEBUG nova.compute.manager [req-b36b82db-aa86-499f-bc7c-feeef0dc1a7e req-3e478e12-f1ce-4448-a75c-0072ad9b3596 service nova] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Received event network-vif-plugged-bf1ec645-7d1d-4e20-8075-a16aa9ea0a48 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 992.934324] env[61629]: DEBUG oslo_concurrency.lockutils [req-b36b82db-aa86-499f-bc7c-feeef0dc1a7e req-3e478e12-f1ce-4448-a75c-0072ad9b3596 service nova] Acquiring lock "d95162d0-cc5e-4516-b76e-8d7736be1032-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.934557] env[61629]: DEBUG oslo_concurrency.lockutils [req-b36b82db-aa86-499f-bc7c-feeef0dc1a7e req-3e478e12-f1ce-4448-a75c-0072ad9b3596 service nova] Lock "d95162d0-cc5e-4516-b76e-8d7736be1032-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.935081] env[61629]: DEBUG oslo_concurrency.lockutils [req-b36b82db-aa86-499f-bc7c-feeef0dc1a7e req-3e478e12-f1ce-4448-a75c-0072ad9b3596 service nova] Lock "d95162d0-cc5e-4516-b76e-8d7736be1032-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.935813] env[61629]: DEBUG nova.compute.manager [req-b36b82db-aa86-499f-bc7c-feeef0dc1a7e req-3e478e12-f1ce-4448-a75c-0072ad9b3596 service nova] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] No waiting events found dispatching network-vif-plugged-bf1ec645-7d1d-4e20-8075-a16aa9ea0a48 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 992.936074] env[61629]: WARNING nova.compute.manager [req-b36b82db-aa86-499f-bc7c-feeef0dc1a7e req-3e478e12-f1ce-4448-a75c-0072ad9b3596 service nova] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Received unexpected event network-vif-plugged-bf1ec645-7d1d-4e20-8075-a16aa9ea0a48 for instance with vm_state building and task_state spawning. 
[ 992.989596] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.989855] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.991402] env[61629]: INFO nova.compute.claims [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 993.097888] env[61629]: DEBUG nova.network.neutron [req-4928bdb1-ba05-4e87-8b38-9e64a4c7bae5 req-5a9ae10e-5c32-4797-a64d-7fd020a34e72 service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Updated VIF entry in instance network info cache for port c827ba81-d74a-4ff3-bfc2-81b5e09c683c. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 993.098366] env[61629]: DEBUG nova.network.neutron [req-4928bdb1-ba05-4e87-8b38-9e64a4c7bae5 req-5a9ae10e-5c32-4797-a64d-7fd020a34e72 service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Updating instance_info_cache with network_info: [{"id": "c827ba81-d74a-4ff3-bfc2-81b5e09c683c", "address": "fa:16:3e:6d:96:2d", "network": {"id": "a1fb78c4-7c5c-4692-86e0-3111b87b44c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1355821875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87909880104e4519b42cb204f366af3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc827ba81-d7", "ovs_interfaceid": "c827ba81-d74a-4ff3-bfc2-81b5e09c683c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.115806] env[61629]: DEBUG oslo_vmware.api [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354509, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.139284] env[61629]: DEBUG nova.network.neutron [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Successfully updated port: bf1ec645-7d1d-4e20-8075-a16aa9ea0a48 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 993.182950] env[61629]: DEBUG oslo_concurrency.lockutils [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Acquiring lock "a83f05b7-f998-4f45-afc1-836fae7c4b95" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.183216] env[61629]: DEBUG oslo_concurrency.lockutils [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Lock "a83f05b7-f998-4f45-afc1-836fae7c4b95" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.499286] env[61629]: DEBUG nova.network.neutron [-] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.528318] env[61629]: DEBUG oslo_vmware.rw_handles [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528553c7-3e56-9ae1-75e5-de7c586c833f/disk-0.vmdk. {{(pid=61629) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 993.529479] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3cb21b1-9fe6-4986-a4f2-bfd73ba4d117 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.536536] env[61629]: DEBUG oslo_vmware.rw_handles [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528553c7-3e56-9ae1-75e5-de7c586c833f/disk-0.vmdk is in state: ready. {{(pid=61629) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 993.536713] env[61629]: ERROR oslo_vmware.rw_handles [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528553c7-3e56-9ae1-75e5-de7c586c833f/disk-0.vmdk due to incomplete transfer. 
[ 993.536950] env[61629]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f0242278-d0fc-45a3-abb2-7d98924729a3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.544385] env[61629]: DEBUG oslo_vmware.rw_handles [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528553c7-3e56-9ae1-75e5-de7c586c833f/disk-0.vmdk. {{(pid=61629) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 993.544583] env[61629]: DEBUG nova.virt.vmwareapi.images [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Uploaded image d1404c99-66c8-439f-b330-d6263aa8a1a3 to the Glance image server {{(pid=61629) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 993.546951] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Destroying the VM {{(pid=61629) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 993.547224] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-39b0f269-15b2-4b70-84c7-3a937a0f090f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.553868] env[61629]: DEBUG oslo_vmware.api [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 993.553868] env[61629]: value = "task-1354510" [ 993.553868] env[61629]: _type = "Task" [ 993.553868] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.562527] env[61629]: DEBUG oslo_vmware.api [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354510, 'name': Destroy_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.612609] env[61629]: DEBUG oslo_concurrency.lockutils [req-4928bdb1-ba05-4e87-8b38-9e64a4c7bae5 req-5a9ae10e-5c32-4797-a64d-7fd020a34e72 service nova] Releasing lock "refresh_cache-459c5f25-8fb1-4e43-8f7f-359a7ff697f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.613065] env[61629]: DEBUG oslo_vmware.api [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354509, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.642024] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "refresh_cache-d95162d0-cc5e-4516-b76e-8d7736be1032" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.642154] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired lock "refresh_cache-d95162d0-cc5e-4516-b76e-8d7736be1032" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.642312] env[61629]: DEBUG nova.network.neutron [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 993.686120] env[61629]: DEBUG nova.compute.manager [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 994.004977] env[61629]: INFO nova.compute.manager [-] [instance: a08e5762-5307-4dd8-a025-a1cdfd43025e] Took 1.31 seconds to deallocate network for instance. [ 994.064794] env[61629]: DEBUG oslo_vmware.api [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354510, 'name': Destroy_Task, 'duration_secs': 0.352397} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.064881] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Destroyed the VM [ 994.065107] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Deleting Snapshot of the VM instance {{(pid=61629) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 994.065367] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fb59c16b-d810-4a6c-990c-b4ba603e60eb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.073647] env[61629]: DEBUG oslo_vmware.api [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 994.073647] env[61629]: value = "task-1354511" [ 994.073647] env[61629]: _type = "Task" [ 994.073647] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.082428] env[61629]: DEBUG oslo_vmware.api [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354511, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.111576] env[61629]: DEBUG oslo_vmware.api [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354509, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.143253] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335c5261-cf59-4ce0-8393-7e75e4957db5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.154076] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b54594b-3160-433c-990b-5bc43cbaf88e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.160341] env[61629]: DEBUG nova.compute.manager [req-c13fa065-f789-4c6c-942d-4838875db72f req-823889f9-dc75-409a-8e95-878293c733e4 service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Received event network-changed-c827ba81-d74a-4ff3-bfc2-81b5e09c683c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 994.160341] env[61629]: DEBUG nova.compute.manager [req-c13fa065-f789-4c6c-942d-4838875db72f req-823889f9-dc75-409a-8e95-878293c733e4 service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Refreshing instance network info cache due to event network-changed-c827ba81-d74a-4ff3-bfc2-81b5e09c683c. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 994.160744] env[61629]: DEBUG oslo_concurrency.lockutils [req-c13fa065-f789-4c6c-942d-4838875db72f req-823889f9-dc75-409a-8e95-878293c733e4 service nova] Acquiring lock "refresh_cache-459c5f25-8fb1-4e43-8f7f-359a7ff697f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.160744] env[61629]: DEBUG oslo_concurrency.lockutils [req-c13fa065-f789-4c6c-942d-4838875db72f req-823889f9-dc75-409a-8e95-878293c733e4 service nova] Acquired lock "refresh_cache-459c5f25-8fb1-4e43-8f7f-359a7ff697f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.160807] env[61629]: DEBUG nova.network.neutron [req-c13fa065-f789-4c6c-942d-4838875db72f req-823889f9-dc75-409a-8e95-878293c733e4 service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Refreshing network info cache for port c827ba81-d74a-4ff3-bfc2-81b5e09c683c {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 994.194629] env[61629]: DEBUG nova.network.neutron [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 994.199065] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd116ee-1fec-4054-ae6c-f9f448314da7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.209583] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a36a39e5-3637-436a-96cd-1d83ddf0e035 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.214059] env[61629]: DEBUG oslo_concurrency.lockutils [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.226770] env[61629]: DEBUG nova.compute.provider_tree [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 994.384104] env[61629]: DEBUG nova.network.neutron [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Updating instance_info_cache with network_info: [{"id": "bf1ec645-7d1d-4e20-8075-a16aa9ea0a48", "address": "fa:16:3e:a4:e0:09", "network": {"id": "c1b68401-68d1-48c7-b118-722070249876", "bridge": "br-int", "label": "tempest-ServersTestJSON-1738845000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0578ce75c37942d4ba6c8b862ceb7d92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf1ec645-7d", "ovs_interfaceid": "bf1ec645-7d1d-4e20-8075-a16aa9ea0a48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.584795] env[61629]: DEBUG oslo_vmware.api [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354511, 'name': RemoveSnapshot_Task, 'duration_secs': 0.399466} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.585042] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Deleted Snapshot of the VM instance {{(pid=61629) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 994.585321] env[61629]: DEBUG nova.compute.manager [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 994.586104] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f816ee3c-a487-44a1-8a63-b98b48391418 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.611067] env[61629]: DEBUG oslo_vmware.api [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Task: {'id': task-1354509, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.851312} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.611320] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 994.611508] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 994.611685] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 994.611860] env[61629]: INFO nova.compute.manager [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Took 2.66 seconds to destroy the instance on the hypervisor. [ 994.612118] env[61629]: DEBUG oslo.service.loopingcall [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 994.612312] env[61629]: DEBUG nova.compute.manager [-] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 994.612407] env[61629]: DEBUG nova.network.neutron [-] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 994.729733] env[61629]: DEBUG nova.scheduler.client.report [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 994.887198] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Releasing lock "refresh_cache-d95162d0-cc5e-4516-b76e-8d7736be1032" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.887538] env[61629]: DEBUG nova.compute.manager [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Instance network_info: |[{"id": "bf1ec645-7d1d-4e20-8075-a16aa9ea0a48", "address": "fa:16:3e:a4:e0:09", "network": {"id": "c1b68401-68d1-48c7-b118-722070249876", "bridge": "br-int", "label": "tempest-ServersTestJSON-1738845000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0578ce75c37942d4ba6c8b862ceb7d92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf1ec645-7d", "ovs_interfaceid": "bf1ec645-7d1d-4e20-8075-a16aa9ea0a48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 994.887963] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:e0:09', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': 'ba866c99-1cb2-4588-9f76-4bc0421ed46a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bf1ec645-7d1d-4e20-8075-a16aa9ea0a48', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 994.895625] env[61629]: DEBUG oslo.service.loopingcall [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 994.895858] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 994.896124] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e51bcd34-97a6-4853-949a-e56e4f3e72ba {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.919544] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 994.919544] env[61629]: value = "task-1354512" [ 994.919544] env[61629]: _type = "Task" [ 994.919544] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.927580] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354512, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.963169] env[61629]: DEBUG nova.compute.manager [req-7305f5b0-250d-474e-aac3-e4d21db03703 req-0d29cd04-0a47-40fb-b02c-e1dde86f5623 service nova] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Received event network-changed-bf1ec645-7d1d-4e20-8075-a16aa9ea0a48 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 994.963407] env[61629]: DEBUG nova.compute.manager [req-7305f5b0-250d-474e-aac3-e4d21db03703 req-0d29cd04-0a47-40fb-b02c-e1dde86f5623 service nova] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Refreshing instance network info cache due to event network-changed-bf1ec645-7d1d-4e20-8075-a16aa9ea0a48. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 994.963636] env[61629]: DEBUG oslo_concurrency.lockutils [req-7305f5b0-250d-474e-aac3-e4d21db03703 req-0d29cd04-0a47-40fb-b02c-e1dde86f5623 service nova] Acquiring lock "refresh_cache-d95162d0-cc5e-4516-b76e-8d7736be1032" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.963787] env[61629]: DEBUG oslo_concurrency.lockutils [req-7305f5b0-250d-474e-aac3-e4d21db03703 req-0d29cd04-0a47-40fb-b02c-e1dde86f5623 service nova] Acquired lock "refresh_cache-d95162d0-cc5e-4516-b76e-8d7736be1032" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.963955] env[61629]: DEBUG nova.network.neutron [req-7305f5b0-250d-474e-aac3-e4d21db03703 req-0d29cd04-0a47-40fb-b02c-e1dde86f5623 service nova] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Refreshing network info cache for port bf1ec645-7d1d-4e20-8075-a16aa9ea0a48 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 995.030971] env[61629]: DEBUG nova.network.neutron [req-c13fa065-f789-4c6c-942d-4838875db72f req-823889f9-dc75-409a-8e95-878293c733e4 service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Updated VIF entry in instance network info cache for port c827ba81-d74a-4ff3-bfc2-81b5e09c683c. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 995.031487] env[61629]: DEBUG nova.network.neutron [req-c13fa065-f789-4c6c-942d-4838875db72f req-823889f9-dc75-409a-8e95-878293c733e4 service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Updating instance_info_cache with network_info: [{"id": "c827ba81-d74a-4ff3-bfc2-81b5e09c683c", "address": "fa:16:3e:6d:96:2d", "network": {"id": "a1fb78c4-7c5c-4692-86e0-3111b87b44c2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1355821875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87909880104e4519b42cb204f366af3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc827ba81-d7", "ovs_interfaceid": "c827ba81-d74a-4ff3-bfc2-81b5e09c683c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.033113] env[61629]: DEBUG oslo_concurrency.lockutils [None req-326e8236-0b7a-4a71-ad65-859b18b1d577 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "a08e5762-5307-4dd8-a025-a1cdfd43025e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.391s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
995.101213] env[61629]: INFO nova.compute.manager [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Shelve offloading [ 995.109281] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 995.109720] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-46e44488-4714-4845-b0cf-d0228488cf87 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.124119] env[61629]: DEBUG oslo_vmware.api [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 995.124119] env[61629]: value = "task-1354513" [ 995.124119] env[61629]: _type = "Task" [ 995.124119] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.140789] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] VM already powered off {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 995.141050] env[61629]: DEBUG nova.compute.manager [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 995.141864] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f9d0a60-35f3-4501-b2f9-28be32955072 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.149040] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "refresh_cache-c5b6f6b8-587c-4b74-bc83-98dac319b15b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.149342] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquired lock "refresh_cache-c5b6f6b8-587c-4b74-bc83-98dac319b15b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.149411] env[61629]: DEBUG nova.network.neutron [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 995.234372] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 
tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.244s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.234958] env[61629]: DEBUG nova.compute.manager [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 995.237703] env[61629]: DEBUG oslo_concurrency.lockutils [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.024s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.239208] env[61629]: INFO nova.compute.claims [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 995.435631] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354512, 'name': CreateVM_Task, 'duration_secs': 0.340743} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.435733] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 995.437597] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.437819] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.438190] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 995.438734] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a44b1787-684d-4f2f-831f-7a418de9094c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.445150] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 
tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 995.445150] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5273e1fd-9eb2-9d2b-0313-219d4e53170b" [ 995.445150] env[61629]: _type = "Task" [ 995.445150] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.455643] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5273e1fd-9eb2-9d2b-0313-219d4e53170b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.535119] env[61629]: DEBUG oslo_concurrency.lockutils [req-c13fa065-f789-4c6c-942d-4838875db72f req-823889f9-dc75-409a-8e95-878293c733e4 service nova] Releasing lock "refresh_cache-459c5f25-8fb1-4e43-8f7f-359a7ff697f2" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 995.707559] env[61629]: DEBUG nova.network.neutron [req-7305f5b0-250d-474e-aac3-e4d21db03703 req-0d29cd04-0a47-40fb-b02c-e1dde86f5623 service nova] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Updated VIF entry in instance network info cache for port bf1ec645-7d1d-4e20-8075-a16aa9ea0a48. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 995.708068] env[61629]: DEBUG nova.network.neutron [req-7305f5b0-250d-474e-aac3-e4d21db03703 req-0d29cd04-0a47-40fb-b02c-e1dde86f5623 service nova] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Updating instance_info_cache with network_info: [{"id": "bf1ec645-7d1d-4e20-8075-a16aa9ea0a48", "address": "fa:16:3e:a4:e0:09", "network": {"id": "c1b68401-68d1-48c7-b118-722070249876", "bridge": "br-int", "label": "tempest-ServersTestJSON-1738845000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0578ce75c37942d4ba6c8b862ceb7d92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf1ec645-7d", "ovs_interfaceid": "bf1ec645-7d1d-4e20-8075-a16aa9ea0a48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.743845] env[61629]: DEBUG nova.compute.utils [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 995.750525] env[61629]: DEBUG nova.compute.manager [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 
tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 995.750525] env[61629]: DEBUG nova.network.neutron [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 995.751193] env[61629]: DEBUG nova.network.neutron [-] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.829339] env[61629]: DEBUG nova.policy [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e533ab909d784fca92e31cc6c296343f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '92d74dfdbfa74614b9950031e913799d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 995.956392] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5273e1fd-9eb2-9d2b-0313-219d4e53170b, 'name': SearchDatastore_Task, 'duration_secs': 0.010409} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.956641] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 995.956887] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 995.957138] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.957294] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.957476] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 995.957745] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-32823ad0-945f-4db0-9fb6-8c4153dc6aab {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.967557] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 995.967557] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 995.968408] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f736435a-fd3a-4894-a683-2d651ab3b8d1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.971367] env[61629]: DEBUG nova.network.neutron [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Updating instance_info_cache with network_info: [{"id": "57805f12-9b81-4485-8f3a-32567ed40a8c", "address": "fa:16:3e:ca:29:87", "network": {"id": "249c4ba3-38e0-421a-91b6-cf97f90eb535", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1700423127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd318d29ec50427eb997c83837120c9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57805f12-9b", "ovs_interfaceid": "57805f12-9b81-4485-8f3a-32567ed40a8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.976246] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 995.976246] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]526ca8c5-20d2-7071-80b2-b397c95227ad" [ 995.976246] env[61629]: _type = "Task" [ 995.976246] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.985286] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]526ca8c5-20d2-7071-80b2-b397c95227ad, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.164338] env[61629]: DEBUG nova.network.neutron [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Successfully created port: de2e75fd-8c5a-4959-ac73-80b64539caa3 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 996.178490] env[61629]: DEBUG oslo_concurrency.lockutils [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "7da77dea-fea2-43a6-a98a-6c492d1a041b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.178795] env[61629]: DEBUG oslo_concurrency.lockutils [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "7da77dea-fea2-43a6-a98a-6c492d1a041b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.211432] env[61629]: DEBUG oslo_concurrency.lockutils [req-7305f5b0-250d-474e-aac3-e4d21db03703 req-0d29cd04-0a47-40fb-b02c-e1dde86f5623 service nova] Releasing lock "refresh_cache-d95162d0-cc5e-4516-b76e-8d7736be1032" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.254778] env[61629]: DEBUG nova.compute.manager [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 996.258743] env[61629]: INFO nova.compute.manager [-] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Took 1.65 seconds to deallocate network for instance. 
[ 996.403348] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-709f6b98-3271-4559-9930-97ef495bb12a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.412795] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c8b5162-9929-41fe-966f-6309096186fd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.445074] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3eeeaca-57a1-4394-a7f7-7af6acc9db81 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.453357] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cab4013-6841-4fd0-a07a-ab7b95267847 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.467281] env[61629]: DEBUG nova.compute.provider_tree [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 996.474737] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Releasing lock "refresh_cache-c5b6f6b8-587c-4b74-bc83-98dac319b15b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.486379] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]526ca8c5-20d2-7071-80b2-b397c95227ad, 'name': SearchDatastore_Task, 'duration_secs': 0.009583} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.487533] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51da79ea-04fc-4d31-bbe9-affc90ce308c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.493349] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 996.493349] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52911c64-95a8-dada-a003-f159f23a3e7c" [ 996.493349] env[61629]: _type = "Task" [ 996.493349] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.502131] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52911c64-95a8-dada-a003-f159f23a3e7c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.683640] env[61629]: DEBUG nova.compute.manager [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 996.731201] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 996.732140] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ac3945-ceba-4e28-996d-e446a5f0ce33 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.740958] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 996.741214] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7801b22e-cab7-4f7e-b53e-f0c622d2f664 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.768427] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.810148] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 996.810148] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 996.810148] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Deleting the datastore file [datastore2] c5b6f6b8-587c-4b74-bc83-98dac319b15b {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 996.810696] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a707350-4f6d-4c39-84c5-c24479627aa8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.817442] env[61629]: DEBUG oslo_vmware.api [None 
req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 996.817442] env[61629]: value = "task-1354515" [ 996.817442] env[61629]: _type = "Task" [ 996.817442] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.825889] env[61629]: DEBUG oslo_vmware.api [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354515, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.970532] env[61629]: DEBUG nova.scheduler.client.report [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 996.992769] env[61629]: DEBUG nova.compute.manager [req-58d38051-741a-45f9-8173-9216f02382ae req-4eb6e734-941c-4a4f-b33e-4d0f25472800 service nova] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Received event network-vif-deleted-b8a895f7-ad9d-4d49-8460-de82459d88f7 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 996.993015] env[61629]: DEBUG nova.compute.manager [req-58d38051-741a-45f9-8173-9216f02382ae req-4eb6e734-941c-4a4f-b33e-4d0f25472800 service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Received event network-vif-unplugged-57805f12-9b81-4485-8f3a-32567ed40a8c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 996.993221] env[61629]: DEBUG oslo_concurrency.lockutils [req-58d38051-741a-45f9-8173-9216f02382ae req-4eb6e734-941c-4a4f-b33e-4d0f25472800 service nova] Acquiring lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.993427] env[61629]: DEBUG oslo_concurrency.lockutils [req-58d38051-741a-45f9-8173-9216f02382ae req-4eb6e734-941c-4a4f-b33e-4d0f25472800 service nova] Lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.993597] env[61629]: DEBUG oslo_concurrency.lockutils [req-58d38051-741a-45f9-8173-9216f02382ae req-4eb6e734-941c-4a4f-b33e-4d0f25472800 service nova] Lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.993764] env[61629]: DEBUG nova.compute.manager [req-58d38051-741a-45f9-8173-9216f02382ae 
req-4eb6e734-941c-4a4f-b33e-4d0f25472800 service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] No waiting events found dispatching network-vif-unplugged-57805f12-9b81-4485-8f3a-32567ed40a8c {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 996.993933] env[61629]: WARNING nova.compute.manager [req-58d38051-741a-45f9-8173-9216f02382ae req-4eb6e734-941c-4a4f-b33e-4d0f25472800 service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Received unexpected event network-vif-unplugged-57805f12-9b81-4485-8f3a-32567ed40a8c for instance with vm_state shelved and task_state shelving_offloading. [ 996.994110] env[61629]: DEBUG nova.compute.manager [req-58d38051-741a-45f9-8173-9216f02382ae req-4eb6e734-941c-4a4f-b33e-4d0f25472800 service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Received event network-changed-57805f12-9b81-4485-8f3a-32567ed40a8c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 996.994267] env[61629]: DEBUG nova.compute.manager [req-58d38051-741a-45f9-8173-9216f02382ae req-4eb6e734-941c-4a4f-b33e-4d0f25472800 service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Refreshing instance network info cache due to event network-changed-57805f12-9b81-4485-8f3a-32567ed40a8c. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 996.994451] env[61629]: DEBUG oslo_concurrency.lockutils [req-58d38051-741a-45f9-8173-9216f02382ae req-4eb6e734-941c-4a4f-b33e-4d0f25472800 service nova] Acquiring lock "refresh_cache-c5b6f6b8-587c-4b74-bc83-98dac319b15b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.994586] env[61629]: DEBUG oslo_concurrency.lockutils [req-58d38051-741a-45f9-8173-9216f02382ae req-4eb6e734-941c-4a4f-b33e-4d0f25472800 service nova] Acquired lock "refresh_cache-c5b6f6b8-587c-4b74-bc83-98dac319b15b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.994740] env[61629]: DEBUG nova.network.neutron [req-58d38051-741a-45f9-8173-9216f02382ae req-4eb6e734-941c-4a4f-b33e-4d0f25472800 service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Refreshing network info cache for port 57805f12-9b81-4485-8f3a-32567ed40a8c {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 997.006934] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52911c64-95a8-dada-a003-f159f23a3e7c, 'name': SearchDatastore_Task, 'duration_secs': 0.010532} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.007220] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.007517] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] d95162d0-cc5e-4516-b76e-8d7736be1032/d95162d0-cc5e-4516-b76e-8d7736be1032.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 997.007760] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f5d913a-d078-4cb4-8c02-117512095bfc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.016337] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 997.016337] env[61629]: value = "task-1354516" [ 997.016337] env[61629]: _type = "Task" [ 997.016337] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.024654] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354516, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.205922] env[61629]: DEBUG oslo_concurrency.lockutils [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.266100] env[61629]: DEBUG nova.compute.manager [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 997.291797] env[61629]: DEBUG nova.virt.hardware [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 997.292073] env[61629]: DEBUG nova.virt.hardware [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 997.292235] env[61629]: DEBUG nova.virt.hardware [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 997.292417] env[61629]: DEBUG nova.virt.hardware [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 997.292569] env[61629]: DEBUG nova.virt.hardware [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 997.292775] env[61629]: DEBUG nova.virt.hardware [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 997.292989] env[61629]: DEBUG nova.virt.hardware [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 997.293164] env[61629]: DEBUG nova.virt.hardware [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 997.293334] env[61629]: DEBUG nova.virt.hardware [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 997.293502] env[61629]: DEBUG nova.virt.hardware [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 997.293677] env[61629]: DEBUG nova.virt.hardware [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 997.294632] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9079eb18-72e4-4804-ac3f-8c7cd42fda6b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.303263] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50076ca3-cd10-48e9-b34c-61479a6ad173 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.326008] env[61629]: DEBUG oslo_vmware.api [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354515, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128063} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.326289] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 997.326476] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 997.326673] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 997.349089] env[61629]: INFO nova.scheduler.client.report [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Deleted allocations for instance c5b6f6b8-587c-4b74-bc83-98dac319b15b [ 997.475376] env[61629]: DEBUG oslo_concurrency.lockutils [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.238s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.475931] env[61629]: DEBUG nova.compute.manager [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 997.478462] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.710s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.478752] env[61629]: DEBUG nova.objects.instance [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lazy-loading 'resources' on Instance uuid fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 997.526183] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354516, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.853691] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.883109] env[61629]: DEBUG nova.network.neutron [req-58d38051-741a-45f9-8173-9216f02382ae req-4eb6e734-941c-4a4f-b33e-4d0f25472800 service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Updated VIF entry in instance network info cache for port 57805f12-9b81-4485-8f3a-32567ed40a8c. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 997.883716] env[61629]: DEBUG nova.network.neutron [req-58d38051-741a-45f9-8173-9216f02382ae req-4eb6e734-941c-4a4f-b33e-4d0f25472800 service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Updating instance_info_cache with network_info: [{"id": "57805f12-9b81-4485-8f3a-32567ed40a8c", "address": "fa:16:3e:ca:29:87", "network": {"id": "249c4ba3-38e0-421a-91b6-cf97f90eb535", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1700423127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd318d29ec50427eb997c83837120c9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap57805f12-9b", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.976448] env[61629]: DEBUG nova.network.neutron [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Successfully updated port: de2e75fd-8c5a-4959-ac73-80b64539caa3 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 997.981471] env[61629]: DEBUG nova.compute.utils [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 997.985329] env[61629]: DEBUG nova.compute.manager [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Not allocating networking since 'none' was specified. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 998.029289] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354516, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.120990] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c74e6f-2173-4523-ab7d-23b4e0e61ca8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.129477] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de7bafe-9b27-4f40-9670-83f2c9692160 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.159562] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2bd34ba-ea00-43b3-a0c2-8d9d4f6cde57 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.167561] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf63eb08-6e04-4e80-a549-218e65e744bc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.180985] env[61629]: DEBUG nova.compute.provider_tree [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 998.388249] env[61629]: DEBUG oslo_concurrency.lockutils [req-58d38051-741a-45f9-8173-9216f02382ae req-4eb6e734-941c-4a4f-b33e-4d0f25472800 service nova] Releasing lock "refresh_cache-c5b6f6b8-587c-4b74-bc83-98dac319b15b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.478809] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Acquiring lock "refresh_cache-9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.479358] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Acquired lock "refresh_cache-9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.479358] env[61629]: DEBUG nova.network.neutron [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 998.487259] env[61629]: DEBUG nova.compute.manager [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 998.530035] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354516, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.684087] env[61629]: DEBUG nova.scheduler.client.report [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 999.018361] env[61629]: DEBUG nova.network.neutron [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 999.025750] env[61629]: DEBUG nova.compute.manager [req-544378d6-b2f6-4bc3-9f4c-6d4b9aa1fd63 req-9cc318cd-5b2c-4327-8bb3-f1bc71b54108 service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Received event network-vif-plugged-de2e75fd-8c5a-4959-ac73-80b64539caa3 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 999.025981] env[61629]: DEBUG oslo_concurrency.lockutils [req-544378d6-b2f6-4bc3-9f4c-6d4b9aa1fd63 req-9cc318cd-5b2c-4327-8bb3-f1bc71b54108 service nova] Acquiring lock "9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.026207] env[61629]: DEBUG oslo_concurrency.lockutils [req-544378d6-b2f6-4bc3-9f4c-6d4b9aa1fd63 req-9cc318cd-5b2c-4327-8bb3-f1bc71b54108 service nova] Lock "9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.026400] env[61629]: DEBUG oslo_concurrency.lockutils [req-544378d6-b2f6-4bc3-9f4c-6d4b9aa1fd63 req-9cc318cd-5b2c-4327-8bb3-f1bc71b54108 service nova] Lock "9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.026550] env[61629]: DEBUG nova.compute.manager [req-544378d6-b2f6-4bc3-9f4c-6d4b9aa1fd63 req-9cc318cd-5b2c-4327-8bb3-f1bc71b54108 service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] No waiting events found dispatching network-vif-plugged-de2e75fd-8c5a-4959-ac73-80b64539caa3 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 999.026718] env[61629]: 
WARNING nova.compute.manager [req-544378d6-b2f6-4bc3-9f4c-6d4b9aa1fd63 req-9cc318cd-5b2c-4327-8bb3-f1bc71b54108 service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Received unexpected event network-vif-plugged-de2e75fd-8c5a-4959-ac73-80b64539caa3 for instance with vm_state building and task_state spawning. [ 999.026884] env[61629]: DEBUG nova.compute.manager [req-544378d6-b2f6-4bc3-9f4c-6d4b9aa1fd63 req-9cc318cd-5b2c-4327-8bb3-f1bc71b54108 service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Received event network-changed-de2e75fd-8c5a-4959-ac73-80b64539caa3 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 999.027053] env[61629]: DEBUG nova.compute.manager [req-544378d6-b2f6-4bc3-9f4c-6d4b9aa1fd63 req-9cc318cd-5b2c-4327-8bb3-f1bc71b54108 service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Refreshing instance network info cache due to event network-changed-de2e75fd-8c5a-4959-ac73-80b64539caa3. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 999.027227] env[61629]: DEBUG oslo_concurrency.lockutils [req-544378d6-b2f6-4bc3-9f4c-6d4b9aa1fd63 req-9cc318cd-5b2c-4327-8bb3-f1bc71b54108 service nova] Acquiring lock "refresh_cache-9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.035525] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354516, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.153882] env[61629]: DEBUG nova.network.neutron [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Updating instance_info_cache with network_info: [{"id": "de2e75fd-8c5a-4959-ac73-80b64539caa3", "address": "fa:16:3e:57:79:05", "network": {"id": "93a87623-e06d-4557-b118-c5170f70390e", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-948282834-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92d74dfdbfa74614b9950031e913799d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "423047aa-c430-4593-a76c-9982c15c81cf", "external-id": "nsx-vlan-transportzone-262", "segmentation_id": 262, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde2e75fd-8c", "ovs_interfaceid": "de2e75fd-8c5a-4959-ac73-80b64539caa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.188555] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 
tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.710s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.191335] env[61629]: DEBUG oslo_concurrency.lockutils [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.986s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.192889] env[61629]: INFO nova.compute.claims [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 999.211665] env[61629]: INFO nova.scheduler.client.report [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Deleted allocations for instance fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4 [ 999.412936] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.496516] env[61629]: DEBUG nova.compute.manager [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 999.523194] env[61629]: DEBUG nova.virt.hardware [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 999.523466] env[61629]: DEBUG nova.virt.hardware [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 999.523628] env[61629]: DEBUG nova.virt.hardware [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 999.523812] env[61629]: DEBUG nova.virt.hardware [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 999.523966] env[61629]: DEBUG nova.virt.hardware [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 999.524139] env[61629]: DEBUG nova.virt.hardware [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 999.524357] env[61629]: DEBUG nova.virt.hardware [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 999.524521] env[61629]: DEBUG nova.virt.hardware [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 999.524689] env[61629]: DEBUG nova.virt.hardware [None req-631b4887-145b-43db-b9d2-14c3d18279d5 
tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 999.524857] env[61629]: DEBUG nova.virt.hardware [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 999.525049] env[61629]: DEBUG nova.virt.hardware [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 999.526018] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3145c24-777f-4728-80bf-c2afae267e75 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.536704] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354516, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.038837} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.538802] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] d95162d0-cc5e-4516-b76e-8d7736be1032/d95162d0-cc5e-4516-b76e-8d7736be1032.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 999.538986] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 999.539295] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-47bf1385-d735-4dfb-a462-dd5d36260a3f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.542027] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22d3e88e-98ac-44f1-bde8-7efd2da02abb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.557702] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Instance VIF info [] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 999.563831] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Creating 
folder: Project (ca3d2cc325ef43d384acd4869646f6ab). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 999.565672] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3ce1cb6e-c36d-48ac-a021-24258268c8d1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.567837] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 999.567837] env[61629]: value = "task-1354517" [ 999.567837] env[61629]: _type = "Task" [ 999.567837] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.578028] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354517, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.579516] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Created folder: Project (ca3d2cc325ef43d384acd4869646f6ab) in parent group-v288443. [ 999.579765] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Creating folder: Instances. Parent ref: group-v288572. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 999.582146] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-96b7aa31-2852-43ab-8ccb-676888baf4f0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.593272] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Created folder: Instances in parent group-v288572. [ 999.593272] env[61629]: DEBUG oslo.service.loopingcall [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 999.593272] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 999.594484] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7e90ef7-93b0-428a-b8bb-89c792f806c6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.612584] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 999.612584] env[61629]: value = "task-1354520" [ 999.612584] env[61629]: _type = "Task" [ 999.612584] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.622750] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354520, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.656545] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Releasing lock "refresh_cache-9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.657013] env[61629]: DEBUG nova.compute.manager [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Instance network_info: |[{"id": "de2e75fd-8c5a-4959-ac73-80b64539caa3", "address": "fa:16:3e:57:79:05", "network": {"id": "93a87623-e06d-4557-b118-c5170f70390e", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-948282834-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92d74dfdbfa74614b9950031e913799d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "423047aa-c430-4593-a76c-9982c15c81cf", "external-id": "nsx-vlan-transportzone-262", "segmentation_id": 262, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde2e75fd-8c", "ovs_interfaceid": "de2e75fd-8c5a-4959-ac73-80b64539caa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 999.657352] env[61629]: DEBUG oslo_concurrency.lockutils [req-544378d6-b2f6-4bc3-9f4c-6d4b9aa1fd63 req-9cc318cd-5b2c-4327-8bb3-f1bc71b54108 service nova] Acquired lock "refresh_cache-9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.657532] env[61629]: DEBUG nova.network.neutron [req-544378d6-b2f6-4bc3-9f4c-6d4b9aa1fd63 req-9cc318cd-5b2c-4327-8bb3-f1bc71b54108 service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Refreshing network info cache for port de2e75fd-8c5a-4959-ac73-80b64539caa3 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 999.658855] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:79:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '423047aa-c430-4593-a76c-9982c15c81cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'de2e75fd-8c5a-4959-ac73-80b64539caa3', 'vif_model': 'vmxnet3'}] 
{{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 999.666791] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Creating folder: Project (92d74dfdbfa74614b9950031e913799d). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 999.669869] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-832a5882-c3de-4b14-87c4-950a86796e1b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.684203] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Created folder: Project (92d74dfdbfa74614b9950031e913799d) in parent group-v288443. [ 999.684432] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Creating folder: Instances. Parent ref: group-v288575. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 999.684737] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f01a7355-8d47-40f6-b52a-120fce6217b5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.718951] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Created folder: Instances in parent group-v288575. [ 999.719274] env[61629]: DEBUG oslo.service.loopingcall [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 999.719787] env[61629]: DEBUG oslo_concurrency.lockutils [None req-dcbcc9cc-c154-4b84-84d6-cd2cc63e4984 tempest-AttachVolumeShelveTestJSON-1601778622 tempest-AttachVolumeShelveTestJSON-1601778622-project-member] Lock "fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.779s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.723597] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 999.723597] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a620b2a7-83c9-45bc-9bb2-bcfc55fd9dff {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.746276] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 999.746276] env[61629]: value = "task-1354523" [ 999.746276] env[61629]: _type = "Task" [ 999.746276] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.758119] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354523, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.949546] env[61629]: DEBUG nova.network.neutron [req-544378d6-b2f6-4bc3-9f4c-6d4b9aa1fd63 req-9cc318cd-5b2c-4327-8bb3-f1bc71b54108 service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Updated VIF entry in instance network info cache for port de2e75fd-8c5a-4959-ac73-80b64539caa3. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 999.950027] env[61629]: DEBUG nova.network.neutron [req-544378d6-b2f6-4bc3-9f4c-6d4b9aa1fd63 req-9cc318cd-5b2c-4327-8bb3-f1bc71b54108 service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Updating instance_info_cache with network_info: [{"id": "de2e75fd-8c5a-4959-ac73-80b64539caa3", "address": "fa:16:3e:57:79:05", "network": {"id": "93a87623-e06d-4557-b118-c5170f70390e", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-948282834-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92d74dfdbfa74614b9950031e913799d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "423047aa-c430-4593-a76c-9982c15c81cf", "external-id": "nsx-vlan-transportzone-262", "segmentation_id": 262, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde2e75fd-8c", "ovs_interfaceid": "de2e75fd-8c5a-4959-ac73-80b64539caa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.077871] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354517, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073033} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.078145] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1000.078999] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906e01bc-9477-47bf-8083-19c88ec5af91 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.101934] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] d95162d0-cc5e-4516-b76e-8d7736be1032/d95162d0-cc5e-4516-b76e-8d7736be1032.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1000.102674] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f898b1f6-31d0-4385-a82f-84123b84bdbb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.126989] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354520, 'name': CreateVM_Task, 'duration_secs': 0.418957} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.128122] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1000.128477] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1000.128477] env[61629]: value = "task-1354524" [ 1000.128477] env[61629]: _type = "Task" [ 1000.128477] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.128890] env[61629]: DEBUG oslo_concurrency.lockutils [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.129068] env[61629]: DEBUG oslo_concurrency.lockutils [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.129398] env[61629]: DEBUG oslo_concurrency.lockutils [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1000.129704] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19817d08-0b34-4d06-85dd-13fdd2141d22 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.140972] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for the task: (returnval){ [ 1000.140972] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52c42760-6dce-86a7-543c-5a3a756c424f" [ 1000.140972] env[61629]: _type = "Task" [ 1000.140972] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.141232] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354524, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.149046] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52c42760-6dce-86a7-543c-5a3a756c424f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.255662] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354523, 'name': CreateVM_Task, 'duration_secs': 0.427879} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.255835] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1000.256535] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.317736] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f182c2a-2603-4963-9af0-c3cff92a2f3b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.325653] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-835bd6ea-6572-4930-93af-a4e4d50a3433 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.357947] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72b1981-715c-4ed5-98fb-7f87815c4094 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.366010] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b85e90d-95b4-4f0b-aea1-4143a8fef020 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.380727] env[61629]: DEBUG nova.compute.provider_tree [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1000.452458] env[61629]: DEBUG oslo_concurrency.lockutils [req-544378d6-b2f6-4bc3-9f4c-6d4b9aa1fd63 req-9cc318cd-5b2c-4327-8bb3-f1bc71b54108 service nova] Releasing lock "refresh_cache-9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.639637] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354524, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.650978] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52c42760-6dce-86a7-543c-5a3a756c424f, 'name': SearchDatastore_Task, 'duration_secs': 0.030962} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.651337] env[61629]: DEBUG oslo_concurrency.lockutils [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.651582] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1000.651819] env[61629]: DEBUG oslo_concurrency.lockutils [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.651970] env[61629]: DEBUG oslo_concurrency.lockutils [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.652184] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1000.652459] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.652778] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1000.653015] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e466f9b7-c0e9-4ea1-b2a7-6a221b80e6b5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.654783] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb4d8ddf-02e8-4fc3-b29b-d67467537abf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.660507] env[61629]: DEBUG oslo_vmware.api [None 
req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Waiting for the task: (returnval){ [ 1000.660507] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52239413-941d-5622-11de-6145c3e9d825" [ 1000.660507] env[61629]: _type = "Task" [ 1000.660507] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.666330] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1000.666542] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1000.670030] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee28a86b-54e1-4fd7-b416-c16c9c0b1991 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.672250] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52239413-941d-5622-11de-6145c3e9d825, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.676094] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for the task: (returnval){ [ 1000.676094] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5296f6f2-569a-2150-17b0-dc662aff85d2" [ 1000.676094] env[61629]: _type = "Task" [ 1000.676094] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.683746] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5296f6f2-569a-2150-17b0-dc662aff85d2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.884164] env[61629]: DEBUG nova.scheduler.client.report [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1001.143617] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354524, 'name': ReconfigVM_Task, 'duration_secs': 0.87901} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.144017] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Reconfigured VM instance instance-00000061 to attach disk [datastore1] d95162d0-cc5e-4516-b76e-8d7736be1032/d95162d0-cc5e-4516-b76e-8d7736be1032.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1001.144871] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4d1dade9-6208-4271-a1cf-479d4a8aa037 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.153794] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1001.153794] env[61629]: value = "task-1354525" [ 1001.153794] env[61629]: _type = "Task" [ 1001.153794] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.165853] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354525, 'name': Rename_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.177307] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52239413-941d-5622-11de-6145c3e9d825, 'name': SearchDatastore_Task, 'duration_secs': 0.021281} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.183655] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.184029] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1001.184347] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1001.192018] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5296f6f2-569a-2150-17b0-dc662aff85d2, 'name': SearchDatastore_Task, 'duration_secs': 0.008619} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.193379] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27cc58c7-bad1-4cd3-bbb3-98c8664720ea {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.201817] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for the task: (returnval){ [ 1001.201817] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52d5d8a8-7db5-0884-09dc-144db0411c13" [ 1001.201817] env[61629]: _type = "Task" [ 1001.201817] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.212660] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52d5d8a8-7db5-0884-09dc-144db0411c13, 'name': SearchDatastore_Task, 'duration_secs': 0.008416} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.212931] env[61629]: DEBUG oslo_concurrency.lockutils [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.213393] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] a83f05b7-f998-4f45-afc1-836fae7c4b95/a83f05b7-f998-4f45-afc1-836fae7c4b95.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1001.213836] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.214082] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1001.214319] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ae0a6c93-71d1-4890-a9f7-c13e9975c268 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.216845] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc5a4ec8-4fb2-4fc7-90f5-94305446c095 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.224012] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for the task: (returnval){ [ 1001.224012] env[61629]: value = "task-1354526" [ 1001.224012] env[61629]: _type = "Task" [ 1001.224012] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.228628] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1001.228898] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1001.232746] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a779c06-a14a-48df-ae00-7a0a931c5da2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.234930] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354526, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.238115] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Waiting for the task: (returnval){ [ 1001.238115] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52658192-2e2a-d844-1a9f-e587411ee678" [ 1001.238115] env[61629]: _type = "Task" [ 1001.238115] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.246247] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52658192-2e2a-d844-1a9f-e587411ee678, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.389379] env[61629]: DEBUG oslo_concurrency.lockutils [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.198s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.389953] env[61629]: DEBUG nova.compute.manager [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1001.392889] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.539s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.393168] env[61629]: DEBUG nova.objects.instance [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lazy-loading 'resources' on Instance uuid c5b6f6b8-587c-4b74-bc83-98dac319b15b {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1001.665340] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354525, 'name': Rename_Task, 'duration_secs': 0.147763} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.665691] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1001.666097] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a997799-2d50-48f9-9103-1c9e9f49b842 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.675966] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1001.675966] env[61629]: value = "task-1354527" [ 1001.675966] env[61629]: _type = "Task" [ 1001.675966] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.707888] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354527, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.734722] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354526, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487694} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.734722] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] a83f05b7-f998-4f45-afc1-836fae7c4b95/a83f05b7-f998-4f45-afc1-836fae7c4b95.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1001.734722] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1001.734998] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0cf055db-abe0-447d-96de-cd6ff5a63c1c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.745132] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for the task: (returnval){ [ 1001.745132] env[61629]: value = "task-1354528" [ 1001.745132] env[61629]: _type = "Task" [ 1001.745132] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.753998] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52658192-2e2a-d844-1a9f-e587411ee678, 'name': SearchDatastore_Task, 'duration_secs': 0.009025} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.755242] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a290b46-3ea1-427d-ae2d-7f547d34aa37 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.760623] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354528, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.764955] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Waiting for the task: (returnval){ [ 1001.764955] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52452381-a958-7643-22ee-645b52c7ef7b" [ 1001.764955] env[61629]: _type = "Task" [ 1001.764955] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.776260] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52452381-a958-7643-22ee-645b52c7ef7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.896336] env[61629]: DEBUG nova.compute.utils [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1001.897951] env[61629]: DEBUG nova.objects.instance [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lazy-loading 'numa_topology' on Instance uuid c5b6f6b8-587c-4b74-bc83-98dac319b15b {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1001.901954] env[61629]: DEBUG nova.compute.manager [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1001.901954] env[61629]: DEBUG nova.network.neutron [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1001.944147] env[61629]: DEBUG nova.policy [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec39705b9dd24915a0b3723ea45a85d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38efdd2cc07f45a49fb06d590aafb96b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 1002.198199] env[61629]: DEBUG oslo_vmware.api [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354527, 'name': PowerOnVM_Task, 'duration_secs': 0.500085} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.198199] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1002.198199] env[61629]: INFO nova.compute.manager [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Took 10.30 seconds to spawn the instance on the hypervisor. [ 1002.198199] env[61629]: DEBUG nova.compute.manager [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1002.198199] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd97d30-9618-423e-9da3-459502853d14 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.261304] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354528, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.110216} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.261968] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1002.263601] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f39e82-b220-4964-ad21-9a8d440dcf0d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.298379] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] a83f05b7-f998-4f45-afc1-836fae7c4b95/a83f05b7-f998-4f45-afc1-836fae7c4b95.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1002.302545] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff79959b-660f-46c8-a938-4f8bb3911c84 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.316846] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52452381-a958-7643-22ee-645b52c7ef7b, 'name': SearchDatastore_Task, 'duration_secs': 0.01053} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.317502] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.317773] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4/9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1002.318443] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba9a6189-07c7-40e5-b0e0-ff09bd381833 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.324825] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for the task: (returnval){ [ 1002.324825] env[61629]: value = "task-1354529" [ 1002.324825] env[61629]: _type = "Task" [ 1002.324825] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.329516] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Waiting for the task: (returnval){ [ 1002.329516] env[61629]: value = "task-1354530" [ 1002.329516] env[61629]: _type = "Task" [ 1002.329516] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.338031] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354529, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.343844] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': task-1354530, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.404021] env[61629]: DEBUG nova.compute.manager [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1002.405259] env[61629]: DEBUG nova.objects.base [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=61629) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1002.504228] env[61629]: DEBUG nova.network.neutron [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Successfully created port: cb382e93-c231-4c57-bab2-1adf21156500 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1002.608075] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f297c74-bbfb-446b-834e-8028cbfae50e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.616713] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f53611-d1ee-4cce-b3b5-5a328586a7f9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.652817] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f225d7-46a2-46cf-b5cc-70944c7279ba {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.661879] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b4ef71c-a7d6-41f7-a08d-290198c1a0ad {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.676800] env[61629]: DEBUG nova.compute.provider_tree [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.711100] env[61629]: INFO nova.compute.manager [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Took 16.05 seconds to build instance. [ 1002.837968] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354529, 'name': ReconfigVM_Task, 'duration_secs': 0.297419} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.838782] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Reconfigured VM instance instance-00000063 to attach disk [datastore1] a83f05b7-f998-4f45-afc1-836fae7c4b95/a83f05b7-f998-4f45-afc1-836fae7c4b95.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1002.839555] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6e59d335-3dfa-4e1d-899e-24117fd985d3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.844572] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': task-1354530, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.853435] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for the task: (returnval){ [ 1002.853435] env[61629]: value = "task-1354531" [ 1002.853435] env[61629]: _type = "Task" [ 1002.853435] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.865356] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354531, 'name': Rename_Task} progress is 5%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.179858] env[61629]: DEBUG nova.scheduler.client.report [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1003.215176] env[61629]: DEBUG oslo_concurrency.lockutils [None req-8d720ce6-05d2-4704-907b-8493f2d4b881 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "d95162d0-cc5e-4516-b76e-8d7736be1032" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.566s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.348027] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': task-1354530, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.786759} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.348149] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4/9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1003.348334] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1003.348952] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eaedfe1d-d1dd-4209-af9a-f7dc525a3c00 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.358970] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Waiting for the task: (returnval){ [ 1003.358970] env[61629]: value = "task-1354532" [ 1003.358970] env[61629]: _type = "Task" [ 1003.358970] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.365971] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354531, 'name': Rename_Task, 'duration_secs': 0.264692} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.366616] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1003.366894] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a0738a2a-771e-4eb5-abe2-2fe3890118f8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.371502] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': task-1354532, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.378201] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for the task: (returnval){ [ 1003.378201] env[61629]: value = "task-1354533" [ 1003.378201] env[61629]: _type = "Task" [ 1003.378201] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.390274] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354533, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.416207] env[61629]: DEBUG nova.compute.manager [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1003.444290] env[61629]: DEBUG nova.virt.hardware [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1003.444592] env[61629]: DEBUG nova.virt.hardware [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1003.444804] env[61629]: DEBUG nova.virt.hardware [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1003.444967] env[61629]: DEBUG nova.virt.hardware [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1003.445520] env[61629]: DEBUG nova.virt.hardware [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1003.445520] env[61629]: DEBUG nova.virt.hardware [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1003.445520] env[61629]: DEBUG nova.virt.hardware [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1003.445650] env[61629]: DEBUG nova.virt.hardware [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1003.445788] env[61629]: DEBUG nova.virt.hardware [None 
req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1003.445954] env[61629]: DEBUG nova.virt.hardware [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1003.446195] env[61629]: DEBUG nova.virt.hardware [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1003.447166] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b323486-6370-49a5-9d19-756bae1b7bd6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.456128] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437135ee-fb22-4be7-9292-c7f2d5749ef7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.685894] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.293s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.842636] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "d95162d0-cc5e-4516-b76e-8d7736be1032" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.843366] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "d95162d0-cc5e-4516-b76e-8d7736be1032" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.843366] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "d95162d0-cc5e-4516-b76e-8d7736be1032-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.843366] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "d95162d0-cc5e-4516-b76e-8d7736be1032-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.843366] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "d95162d0-cc5e-4516-b76e-8d7736be1032-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.845618] env[61629]: INFO nova.compute.manager [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Terminating instance [ 1003.849671] env[61629]: DEBUG nova.compute.manager [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1003.850161] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1003.850783] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c2e549e-7903-43eb-ab80-e13adbed86c1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.859377] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1003.859698] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c193cb0-552e-4dca-a659-de612d15f54b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.870039] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': task-1354532, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067133} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.871400] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1003.871760] env[61629]: DEBUG oslo_vmware.api [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1003.871760] env[61629]: value = "task-1354534" [ 1003.871760] env[61629]: _type = "Task" [ 1003.871760] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.872504] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d66910d-35a4-4adc-9848-d69c1e64e112 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.894925] env[61629]: DEBUG oslo_vmware.api [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354534, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.903754] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4/9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1003.904526] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8acc78c0-0427-4a00-ab4c-5703ba230cce {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.922190] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354533, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.927974] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Waiting for the task: (returnval){ [ 1003.927974] env[61629]: value = "task-1354535" [ 1003.927974] env[61629]: _type = "Task" [ 1003.927974] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.936042] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': task-1354535, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.962288] env[61629]: DEBUG nova.compute.manager [req-ea0538e6-3ae0-4b42-8b5c-2e1db4bebe37 req-67cd8386-55da-4cdf-aa56-3be488228627 service nova] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Received event network-vif-plugged-cb382e93-c231-4c57-bab2-1adf21156500 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1003.962588] env[61629]: DEBUG oslo_concurrency.lockutils [req-ea0538e6-3ae0-4b42-8b5c-2e1db4bebe37 req-67cd8386-55da-4cdf-aa56-3be488228627 service nova] Acquiring lock "7da77dea-fea2-43a6-a98a-6c492d1a041b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.962782] env[61629]: DEBUG oslo_concurrency.lockutils [req-ea0538e6-3ae0-4b42-8b5c-2e1db4bebe37 req-67cd8386-55da-4cdf-aa56-3be488228627 service nova] Lock "7da77dea-fea2-43a6-a98a-6c492d1a041b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.962983] env[61629]: DEBUG oslo_concurrency.lockutils [req-ea0538e6-3ae0-4b42-8b5c-2e1db4bebe37 req-67cd8386-55da-4cdf-aa56-3be488228627 service nova] Lock "7da77dea-fea2-43a6-a98a-6c492d1a041b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.963193] env[61629]: DEBUG nova.compute.manager [req-ea0538e6-3ae0-4b42-8b5c-2e1db4bebe37 req-67cd8386-55da-4cdf-aa56-3be488228627 service nova] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] No waiting events found dispatching network-vif-plugged-cb382e93-c231-4c57-bab2-1adf21156500 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1003.963380] env[61629]: WARNING nova.compute.manager [req-ea0538e6-3ae0-4b42-8b5c-2e1db4bebe37 req-67cd8386-55da-4cdf-aa56-3be488228627 service nova] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Received unexpected event network-vif-plugged-cb382e93-c231-4c57-bab2-1adf21156500 for instance with vm_state building and task_state spawning. 
[ 1004.070434] env[61629]: DEBUG nova.network.neutron [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Successfully updated port: cb382e93-c231-4c57-bab2-1adf21156500 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1004.195257] env[61629]: DEBUG oslo_concurrency.lockutils [None req-6da32cfc-12d7-40ac-88d6-25ccfb656519 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 23.602s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.196133] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 4.783s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.196324] env[61629]: INFO nova.compute.manager [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Unshelving [ 1004.385293] env[61629]: DEBUG oslo_vmware.api [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354534, 'name': PowerOffVM_Task, 'duration_secs': 0.179477} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.385917] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1004.386389] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1004.386389] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6265e97e-5254-424c-807a-9fbe6a859125 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.391262] env[61629]: DEBUG oslo_vmware.api [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354533, 'name': PowerOnVM_Task, 'duration_secs': 0.540398} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.391856] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1004.392090] env[61629]: INFO nova.compute.manager [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Took 4.90 seconds to spawn the instance on the hypervisor. [ 1004.392290] env[61629]: DEBUG nova.compute.manager [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1004.393213] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f5c864b-c2da-4b03-9a3e-abbdbfe9296c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.438554] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': task-1354535, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.458966] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1004.459326] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1004.459523] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Deleting the datastore file [datastore1] d95162d0-cc5e-4516-b76e-8d7736be1032 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1004.459818] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71b5c982-36bb-4719-b566-95ed96edbac4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.467232] env[61629]: DEBUG oslo_vmware.api [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1004.467232] env[61629]: value = "task-1354537" [ 1004.467232] env[61629]: _type = "Task" [ 1004.467232] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.476053] env[61629]: DEBUG oslo_vmware.api [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354537, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.574701] env[61629]: DEBUG oslo_concurrency.lockutils [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "refresh_cache-7da77dea-fea2-43a6-a98a-6c492d1a041b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1004.574880] env[61629]: DEBUG oslo_concurrency.lockutils [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired lock "refresh_cache-7da77dea-fea2-43a6-a98a-6c492d1a041b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.575117] env[61629]: DEBUG nova.network.neutron [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1004.912270] env[61629]: INFO nova.compute.manager [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Took 10.72 seconds to build instance. [ 1004.938889] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': task-1354535, 'name': ReconfigVM_Task, 'duration_secs': 0.801552} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.939317] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4/9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1004.940018] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bd20431a-f347-463b-88f8-ffd94b1285e0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.950224] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Waiting for the task: (returnval){ [ 1004.950224] env[61629]: value = "task-1354538" [ 1004.950224] env[61629]: _type = "Task" [ 1004.950224] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.959893] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': task-1354538, 'name': Rename_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.976931] env[61629]: DEBUG oslo_vmware.api [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354537, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141013} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.977247] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1004.977489] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1004.977858] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1004.978180] env[61629]: INFO nova.compute.manager [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1004.978473] env[61629]: DEBUG oslo.service.loopingcall [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1004.978677] env[61629]: DEBUG nova.compute.manager [-] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1004.978785] env[61629]: DEBUG nova.network.neutron [-] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1005.144333] env[61629]: DEBUG nova.network.neutron [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1005.224589] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.224906] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.225173] env[61629]: DEBUG nova.objects.instance [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lazy-loading 'pci_requests' on Instance uuid c5b6f6b8-587c-4b74-bc83-98dac319b15b {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1005.414699] env[61629]: DEBUG nova.network.neutron [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Updating instance_info_cache with network_info: [{"id": "cb382e93-c231-4c57-bab2-1adf21156500", "address": "fa:16:3e:19:f9:4e", "network": {"id": "03610486-2741-491e-a62d-a51579315e5a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1394073503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38efdd2cc07f45a49fb06d590aafb96b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb382e93-c2", "ovs_interfaceid": "cb382e93-c231-4c57-bab2-1adf21156500", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.416540] env[61629]: DEBUG oslo_concurrency.lockutils [None req-631b4887-145b-43db-b9d2-14c3d18279d5 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Lock "a83f05b7-f998-4f45-afc1-836fae7c4b95" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.233s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.461251] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': 
task-1354538, 'name': Rename_Task, 'duration_secs': 0.241558} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.461548] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1005.462540] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-746b528b-254d-4d4b-b953-919319b8413b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.470383] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Waiting for the task: (returnval){ [ 1005.470383] env[61629]: value = "task-1354539" [ 1005.470383] env[61629]: _type = "Task" [ 1005.470383] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.480691] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': task-1354539, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.730062] env[61629]: DEBUG nova.objects.instance [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lazy-loading 'numa_topology' on Instance uuid c5b6f6b8-587c-4b74-bc83-98dac319b15b {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1005.878656] env[61629]: DEBUG nova.network.neutron [-] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.919312] env[61629]: DEBUG oslo_concurrency.lockutils [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Releasing lock "refresh_cache-7da77dea-fea2-43a6-a98a-6c492d1a041b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.919650] env[61629]: DEBUG nova.compute.manager [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Instance network_info: |[{"id": "cb382e93-c231-4c57-bab2-1adf21156500", "address": "fa:16:3e:19:f9:4e", "network": {"id": "03610486-2741-491e-a62d-a51579315e5a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1394073503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"38efdd2cc07f45a49fb06d590aafb96b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb382e93-c2", "ovs_interfaceid": "cb382e93-c231-4c57-bab2-1adf21156500", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1005.920058] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:f9:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd88b750a-0e7d-4f16-8bd5-8e6d5743b720', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cb382e93-c231-4c57-bab2-1adf21156500', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1005.927724] env[61629]: DEBUG oslo.service.loopingcall [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1005.929134] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1005.929134] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27748284-02b3-4357-bcc9-e80ff38c580b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.949019] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1005.949019] env[61629]: value = "task-1354540" [ 1005.949019] env[61629]: _type = "Task" [ 1005.949019] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.958405] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354540, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.980433] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': task-1354539, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.992319] env[61629]: DEBUG nova.compute.manager [req-a01a871b-8e8c-4b21-9e22-331d902754a9 req-aa8b622b-ba9c-4915-8f63-384a29ed9f3a service nova] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Received event network-changed-cb382e93-c231-4c57-bab2-1adf21156500 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1005.992535] env[61629]: DEBUG nova.compute.manager [req-a01a871b-8e8c-4b21-9e22-331d902754a9 req-aa8b622b-ba9c-4915-8f63-384a29ed9f3a service nova] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Refreshing instance network info cache due to event network-changed-cb382e93-c231-4c57-bab2-1adf21156500. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1005.993496] env[61629]: DEBUG oslo_concurrency.lockutils [req-a01a871b-8e8c-4b21-9e22-331d902754a9 req-aa8b622b-ba9c-4915-8f63-384a29ed9f3a service nova] Acquiring lock "refresh_cache-7da77dea-fea2-43a6-a98a-6c492d1a041b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.993496] env[61629]: DEBUG oslo_concurrency.lockutils [req-a01a871b-8e8c-4b21-9e22-331d902754a9 req-aa8b622b-ba9c-4915-8f63-384a29ed9f3a service nova] Acquired lock "refresh_cache-7da77dea-fea2-43a6-a98a-6c492d1a041b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.993496] env[61629]: DEBUG nova.network.neutron [req-a01a871b-8e8c-4b21-9e22-331d902754a9 req-aa8b622b-ba9c-4915-8f63-384a29ed9f3a service nova] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Refreshing network info cache for port cb382e93-c231-4c57-bab2-1adf21156500 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1006.714589] env[61629]: INFO nova.compute.manager [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Rebuilding instance [ 1006.716765] env[61629]: INFO nova.compute.claims [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1006.719331] env[61629]: INFO nova.compute.manager [-] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Took 1.74 seconds to deallocate network for instance. [ 1006.741419] env[61629]: DEBUG oslo_vmware.api [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': task-1354539, 'name': PowerOnVM_Task, 'duration_secs': 0.952497} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.741872] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354540, 'name': CreateVM_Task, 'duration_secs': 0.520012} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.743837] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1006.744067] env[61629]: INFO nova.compute.manager [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Took 9.48 seconds to spawn the instance on the hypervisor. [ 1006.744325] env[61629]: DEBUG nova.compute.manager [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1006.744537] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1006.745457] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0578d624-b4fd-43a0-9ac2-013d9235b629 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.749702] env[61629]: DEBUG oslo_concurrency.lockutils [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.749702] env[61629]: DEBUG oslo_concurrency.lockutils [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.749702] env[61629]: DEBUG oslo_concurrency.lockutils [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1006.749702] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28bf6293-5c46-4bc6-b400-7273e3f34ec6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.764260] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1006.764260] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]525c93a0-e322-889d-1122-5de0dfa84e31" [ 1006.764260] env[61629]: _type = "Task" [ 1006.764260] 
env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.774806] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]525c93a0-e322-889d-1122-5de0dfa84e31, 'name': SearchDatastore_Task, 'duration_secs': 0.010594} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.775737] env[61629]: DEBUG oslo_concurrency.lockutils [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.775977] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1006.776603] env[61629]: DEBUG oslo_concurrency.lockutils [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.776603] env[61629]: DEBUG oslo_concurrency.lockutils [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.776603] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1006.777024] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13734197-6ec1-433f-a3d9-29ce5124ac02 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.788176] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1006.788367] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1006.789296] env[61629]: DEBUG nova.compute.manager [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1006.791341] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7a378ff-8ac3-4f81-a90d-20b27fdc7b42 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.792474] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128f5790-4a0b-4e75-b9d9-33cdcdc078db {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.802818] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1006.802818] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52853c30-3f7b-f8e0-592f-c77e809e30e8" [ 1006.802818] env[61629]: _type = "Task" [ 1006.802818] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.812112] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52853c30-3f7b-f8e0-592f-c77e809e30e8, 'name': SearchDatastore_Task, 'duration_secs': 0.009477} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.812892] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2da2c5e-f63c-4875-a368-860ba37a5531 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.818260] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1006.818260] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52289241-e2e6-553d-c4eb-638cefcaf734" [ 1006.818260] env[61629]: _type = "Task" [ 1006.818260] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.826490] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52289241-e2e6-553d-c4eb-638cefcaf734, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.233324] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.273844] env[61629]: INFO nova.compute.manager [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Took 14.32 seconds to build instance. [ 1007.304642] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1007.304882] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-263f8c70-5f96-4405-80ed-2a749b867bca {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.313496] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for the task: (returnval){ [ 1007.313496] env[61629]: value = "task-1354541" [ 1007.313496] env[61629]: _type = "Task" [ 1007.313496] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.325852] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354541, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.333714] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52289241-e2e6-553d-c4eb-638cefcaf734, 'name': SearchDatastore_Task, 'duration_secs': 0.009637} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.333979] env[61629]: DEBUG oslo_concurrency.lockutils [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.334257] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 7da77dea-fea2-43a6-a98a-6c492d1a041b/7da77dea-fea2-43a6-a98a-6c492d1a041b.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1007.334515] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-311b7e33-1605-4f43-8761-a8ab896dcdff {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.341873] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1007.341873] env[61629]: value = "task-1354542" [ 1007.341873] env[61629]: _type = "Task" [ 1007.341873] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.350479] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354542, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.454341] env[61629]: DEBUG nova.network.neutron [req-a01a871b-8e8c-4b21-9e22-331d902754a9 req-aa8b622b-ba9c-4915-8f63-384a29ed9f3a service nova] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Updated VIF entry in instance network info cache for port cb382e93-c231-4c57-bab2-1adf21156500. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1007.454826] env[61629]: DEBUG nova.network.neutron [req-a01a871b-8e8c-4b21-9e22-331d902754a9 req-aa8b622b-ba9c-4915-8f63-384a29ed9f3a service nova] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Updating instance_info_cache with network_info: [{"id": "cb382e93-c231-4c57-bab2-1adf21156500", "address": "fa:16:3e:19:f9:4e", "network": {"id": "03610486-2741-491e-a62d-a51579315e5a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1394073503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38efdd2cc07f45a49fb06d590aafb96b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb382e93-c2", "ovs_interfaceid": "cb382e93-c231-4c57-bab2-1adf21156500", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.776515] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7a56501e-0f3f-48c9-a8ae-e0c6fac314f2 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Lock "9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.837s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.823418] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354541, 'name': PowerOffVM_Task, 'duration_secs': 0.215176} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.825993] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1007.826247] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1007.827246] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c55663a-c103-4ccc-be12-e963595cf87d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.834115] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1007.834362] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f564ad8c-a898-46df-97f0-cddbfbd6c71a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.850621] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354542, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469615} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.852977] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 7da77dea-fea2-43a6-a98a-6c492d1a041b/7da77dea-fea2-43a6-a98a-6c492d1a041b.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1007.853229] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1007.853725] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-413bf9a5-42e4-49ee-a7bd-cf3ab162bdc9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.860610] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1007.860610] env[61629]: value = "task-1354544" [ 1007.860610] env[61629]: _type = "Task" [ 1007.860610] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.867966] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1007.868317] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1007.868608] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Deleting the datastore file [datastore1] a83f05b7-f998-4f45-afc1-836fae7c4b95 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1007.869018] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ea2388ab-5f14-4cd7-9faa-c05137e9a23c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.875777] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354544, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.881226] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for the task: (returnval){ [ 1007.881226] env[61629]: value = "task-1354545" [ 1007.881226] env[61629]: _type = "Task" [ 1007.881226] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.882965] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a42d0c35-79ef-48e4-b643-f5a1f97117cd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.895500] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3732a87a-530e-4399-88db-5e7674fd343c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.899385] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354545, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.927759] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549fd65f-4a22-41cc-b9fd-77a52eb3fef2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.935563] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc48f1a6-a710-4966-89d9-2368ec0fce8f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.949936] env[61629]: DEBUG nova.compute.provider_tree [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1007.958163] env[61629]: DEBUG oslo_concurrency.lockutils [req-a01a871b-8e8c-4b21-9e22-331d902754a9 req-aa8b622b-ba9c-4915-8f63-384a29ed9f3a service nova] Releasing lock "refresh_cache-7da77dea-fea2-43a6-a98a-6c492d1a041b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.958433] env[61629]: DEBUG nova.compute.manager [req-a01a871b-8e8c-4b21-9e22-331d902754a9 req-aa8b622b-ba9c-4915-8f63-384a29ed9f3a service nova] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Received event network-vif-deleted-bf1ec645-7d1d-4e20-8075-a16aa9ea0a48 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1008.370625] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354544, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065242} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.370957] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1008.371787] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9332c7-1afa-40eb-b844-5d923ede8722 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.394336] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] 7da77dea-fea2-43a6-a98a-6c492d1a041b/7da77dea-fea2-43a6-a98a-6c492d1a041b.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1008.397308] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08d00b19-ceb5-4de3-9789-eaf182a9ff76 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.416708] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354545, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091806} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.417944] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1008.418198] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1008.418509] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1008.420962] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1008.420962] env[61629]: value = "task-1354546" [ 1008.420962] env[61629]: _type = "Task" [ 1008.420962] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.428810] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354546, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.453213] env[61629]: DEBUG nova.scheduler.client.report [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1008.510520] env[61629]: DEBUG nova.compute.manager [req-05fb7527-da77-46e5-87be-9482da4b5514 req-df0c3045-d27b-4cb4-bdcc-c285b419589e service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Received event network-changed-de2e75fd-8c5a-4959-ac73-80b64539caa3 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1008.510730] env[61629]: DEBUG nova.compute.manager [req-05fb7527-da77-46e5-87be-9482da4b5514 req-df0c3045-d27b-4cb4-bdcc-c285b419589e service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Refreshing instance network info cache due to event network-changed-de2e75fd-8c5a-4959-ac73-80b64539caa3. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1008.511029] env[61629]: DEBUG oslo_concurrency.lockutils [req-05fb7527-da77-46e5-87be-9482da4b5514 req-df0c3045-d27b-4cb4-bdcc-c285b419589e service nova] Acquiring lock "refresh_cache-9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.511246] env[61629]: DEBUG oslo_concurrency.lockutils [req-05fb7527-da77-46e5-87be-9482da4b5514 req-df0c3045-d27b-4cb4-bdcc-c285b419589e service nova] Acquired lock "refresh_cache-9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.511453] env[61629]: DEBUG nova.network.neutron [req-05fb7527-da77-46e5-87be-9482da4b5514 req-df0c3045-d27b-4cb4-bdcc-c285b419589e service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Refreshing network info cache for port de2e75fd-8c5a-4959-ac73-80b64539caa3 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1008.935511] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354546, 'name': ReconfigVM_Task, 'duration_secs': 0.46841} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.935860] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Reconfigured VM instance instance-00000064 to attach disk [datastore2] 7da77dea-fea2-43a6-a98a-6c492d1a041b/7da77dea-fea2-43a6-a98a-6c492d1a041b.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1008.936507] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d3328157-63a8-44f0-a808-1c433ff5eab6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.943902] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1008.943902] env[61629]: value = "task-1354547" [ 1008.943902] env[61629]: _type = "Task" [ 1008.943902] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.952508] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354547, 'name': Rename_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.958257] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.733s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.960365] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.727s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.960599] env[61629]: DEBUG nova.objects.instance [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lazy-loading 'resources' on Instance uuid d95162d0-cc5e-4516-b76e-8d7736be1032 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1009.009935] env[61629]: INFO nova.network.neutron [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Updating port 57805f12-9b81-4485-8f3a-32567ed40a8c with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1009.367082] env[61629]: DEBUG nova.network.neutron [req-05fb7527-da77-46e5-87be-9482da4b5514 req-df0c3045-d27b-4cb4-bdcc-c285b419589e service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Updated VIF entry in instance network info cache for port 
de2e75fd-8c5a-4959-ac73-80b64539caa3. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1009.367082] env[61629]: DEBUG nova.network.neutron [req-05fb7527-da77-46e5-87be-9482da4b5514 req-df0c3045-d27b-4cb4-bdcc-c285b419589e service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Updating instance_info_cache with network_info: [{"id": "de2e75fd-8c5a-4959-ac73-80b64539caa3", "address": "fa:16:3e:57:79:05", "network": {"id": "93a87623-e06d-4557-b118-c5170f70390e", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-948282834-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92d74dfdbfa74614b9950031e913799d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "423047aa-c430-4593-a76c-9982c15c81cf", "external-id": "nsx-vlan-transportzone-262", "segmentation_id": 262, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde2e75fd-8c", "ovs_interfaceid": "de2e75fd-8c5a-4959-ac73-80b64539caa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.457879] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354547, 'name': Rename_Task, 'duration_secs': 0.497147} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.458769] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1009.458769] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-59406401-40d9-4111-b69e-890c12ca0a51 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.462130] env[61629]: DEBUG nova.virt.hardware [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1009.462366] env[61629]: DEBUG nova.virt.hardware [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1009.462526] env[61629]: DEBUG nova.virt.hardware [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1009.462710] env[61629]: DEBUG nova.virt.hardware [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1009.462857] env[61629]: DEBUG nova.virt.hardware [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1009.463021] env[61629]: DEBUG nova.virt.hardware [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1009.463230] env[61629]: DEBUG nova.virt.hardware [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1009.463393] env[61629]: DEBUG nova.virt.hardware [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1009.463664] env[61629]: DEBUG nova.virt.hardware [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1009.463868] env[61629]: DEBUG nova.virt.hardware [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1009.464062] env[61629]: DEBUG nova.virt.hardware [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1009.467009] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a1e9d35-8714-4d73-9d5e-d69157e6ee15 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.476507] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b14fb6-4a7e-468c-a4e7-9ba89f76374f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.480321] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1009.480321] env[61629]: value = "task-1354548" [ 1009.480321] env[61629]: _type = "Task" [ 1009.480321] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.493348] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Instance VIF info [] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1009.498873] env[61629]: DEBUG oslo.service.loopingcall [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1009.500123] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1009.500202] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ff5cb557-4318-4bdc-85c8-6eb06ee19b1a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.517230] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354548, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.523214] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1009.523214] env[61629]: value = "task-1354549" [ 1009.523214] env[61629]: _type = "Task" [ 1009.523214] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.531342] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354549, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.620721] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e312a9a5-17af-479e-be16-c16aea0b5f65 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.629784] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f415c3b9-a80b-4f2b-bf06-96302b94a0ab {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.666036] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db75497c-191d-409e-8ee2-660a93c4f518 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.673842] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88912e53-2b2e-466c-bca0-c201a2055f93 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.687479] env[61629]: DEBUG nova.compute.provider_tree [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1009.869521] env[61629]: DEBUG oslo_concurrency.lockutils [req-05fb7527-da77-46e5-87be-9482da4b5514 req-df0c3045-d27b-4cb4-bdcc-c285b419589e service nova] Releasing lock "refresh_cache-9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1009.989843] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354548, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.032605] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354549, 'name': CreateVM_Task} progress is 99%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.190930] env[61629]: DEBUG nova.scheduler.client.report [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1010.491034] env[61629]: DEBUG oslo_vmware.api [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354548, 'name': PowerOnVM_Task, 'duration_secs': 0.844765} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.491368] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1010.491590] env[61629]: INFO nova.compute.manager [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Took 7.08 seconds to spawn the instance on the hypervisor. [ 1010.491772] env[61629]: DEBUG nova.compute.manager [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1010.492545] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4449cbd-494a-4f25-a7ca-0a702237326a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.534252] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354549, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.578682] env[61629]: DEBUG nova.compute.manager [req-63679f6a-31ef-435f-b54b-48b2f53f5096 req-c65a867d-b618-4141-b95e-4fb1a09ab49c service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Received event network-vif-plugged-57805f12-9b81-4485-8f3a-32567ed40a8c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1010.579038] env[61629]: DEBUG oslo_concurrency.lockutils [req-63679f6a-31ef-435f-b54b-48b2f53f5096 req-c65a867d-b618-4141-b95e-4fb1a09ab49c service nova] Acquiring lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.579210] env[61629]: DEBUG oslo_concurrency.lockutils [req-63679f6a-31ef-435f-b54b-48b2f53f5096 req-c65a867d-b618-4141-b95e-4fb1a09ab49c service nova] Lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.579394] env[61629]: DEBUG oslo_concurrency.lockutils [req-63679f6a-31ef-435f-b54b-48b2f53f5096 req-c65a867d-b618-4141-b95e-4fb1a09ab49c service nova] Lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.579568] env[61629]: DEBUG nova.compute.manager [req-63679f6a-31ef-435f-b54b-48b2f53f5096 req-c65a867d-b618-4141-b95e-4fb1a09ab49c service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] No waiting events found dispatching network-vif-plugged-57805f12-9b81-4485-8f3a-32567ed40a8c {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1010.579736] env[61629]: WARNING nova.compute.manager [req-63679f6a-31ef-435f-b54b-48b2f53f5096 req-c65a867d-b618-4141-b95e-4fb1a09ab49c service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Received unexpected event network-vif-plugged-57805f12-9b81-4485-8f3a-32567ed40a8c for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1010.668132] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "refresh_cache-c5b6f6b8-587c-4b74-bc83-98dac319b15b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1010.668132] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquired lock "refresh_cache-c5b6f6b8-587c-4b74-bc83-98dac319b15b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.668132] env[61629]: DEBUG nova.network.neutron [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1010.695277] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.735s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.717832] env[61629]: INFO nova.scheduler.client.report [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Deleted allocations for instance d95162d0-cc5e-4516-b76e-8d7736be1032 [ 1011.010856] env[61629]: INFO nova.compute.manager [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Took 13.82 seconds to build instance. [ 1011.035300] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354549, 'name': CreateVM_Task, 'duration_secs': 1.318618} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.035475] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1011.035893] env[61629]: DEBUG oslo_concurrency.lockutils [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.036074] env[61629]: DEBUG oslo_concurrency.lockutils [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.036410] env[61629]: DEBUG oslo_concurrency.lockutils [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1011.036668] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e28a4bc3-da50-4426-af05-f82ba0bffced {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.041713] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for the task: (returnval){ [ 1011.041713] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]529eda2a-1338-2a2a-8c99-b1998f02d42d" [ 1011.041713] env[61629]: _type = "Task" [ 1011.041713] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.049222] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]529eda2a-1338-2a2a-8c99-b1998f02d42d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.225218] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b928bdd8-fbce-42bb-9d0b-fb1e124a7e16 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "d95162d0-cc5e-4516-b76e-8d7736be1032" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.382s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.408829] env[61629]: DEBUG nova.network.neutron [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Updating instance_info_cache with network_info: [{"id": "57805f12-9b81-4485-8f3a-32567ed40a8c", "address": "fa:16:3e:ca:29:87", "network": {"id": "249c4ba3-38e0-421a-91b6-cf97f90eb535", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1700423127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd318d29ec50427eb997c83837120c9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57805f12-9b", "ovs_interfaceid": "57805f12-9b81-4485-8f3a-32567ed40a8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.513602] env[61629]: DEBUG oslo_concurrency.lockutils [None req-48033a09-cbfd-45e2-87cf-69277194cf2a tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "7da77dea-fea2-43a6-a98a-6c492d1a041b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.335s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.551705] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]529eda2a-1338-2a2a-8c99-b1998f02d42d, 'name': SearchDatastore_Task, 'duration_secs': 0.011942} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.552037] env[61629]: DEBUG oslo_concurrency.lockutils [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.552283] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1011.552517] env[61629]: DEBUG oslo_concurrency.lockutils [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.552668] env[61629]: DEBUG oslo_concurrency.lockutils [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.552851] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1011.553400] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a581f781-7f54-42e2-824e-be901b9548cb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.561673] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1011.561844] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1011.562535] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96b24b63-832e-400b-807d-34fb5dbbdc63 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.567535] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for the task: (returnval){ [ 1011.567535] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5277c492-9012-146c-c109-f298279a31d2" [ 1011.567535] env[61629]: _type = "Task" [ 1011.567535] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.575259] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5277c492-9012-146c-c109-f298279a31d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.584739] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3434f6a5-51c9-4351-9d7d-a37547947ec4 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "7da77dea-fea2-43a6-a98a-6c492d1a041b" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.584966] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3434f6a5-51c9-4351-9d7d-a37547947ec4 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "7da77dea-fea2-43a6-a98a-6c492d1a041b" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.585189] env[61629]: DEBUG nova.compute.manager [None req-3434f6a5-51c9-4351-9d7d-a37547947ec4 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1011.585938] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c4c0a94-f884-4fcd-9c09-b8be4c4e7a83 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.591986] env[61629]: DEBUG nova.compute.manager [None req-3434f6a5-51c9-4351-9d7d-a37547947ec4 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61629) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1011.592555] env[61629]: DEBUG nova.objects.instance [None req-3434f6a5-51c9-4351-9d7d-a37547947ec4 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lazy-loading 'flavor' on Instance uuid 
7da77dea-fea2-43a6-a98a-6c492d1a041b {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1011.696706] env[61629]: DEBUG oslo_concurrency.lockutils [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "22f71f92-ca9a-4b97-a652-3f34a0dabde2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.697055] env[61629]: DEBUG oslo_concurrency.lockutils [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "22f71f92-ca9a-4b97-a652-3f34a0dabde2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.697316] env[61629]: DEBUG oslo_concurrency.lockutils [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "22f71f92-ca9a-4b97-a652-3f34a0dabde2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.697512] env[61629]: DEBUG oslo_concurrency.lockutils [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "22f71f92-ca9a-4b97-a652-3f34a0dabde2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.697690] env[61629]: DEBUG oslo_concurrency.lockutils [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "22f71f92-ca9a-4b97-a652-3f34a0dabde2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.699757] env[61629]: INFO nova.compute.manager [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Terminating instance [ 1011.701537] env[61629]: DEBUG nova.compute.manager [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1011.701733] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1011.702613] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b689b5-0316-420b-a1b5-cbff5af23652 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.710849] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1011.711100] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9582903b-773f-4774-b886-7349d167b075 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.718618] env[61629]: DEBUG oslo_vmware.api [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1011.718618] env[61629]: value = "task-1354550" [ 1011.718618] env[61629]: _type = "Task" [ 1011.718618] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.726924] env[61629]: DEBUG oslo_vmware.api [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354550, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.912031] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Releasing lock "refresh_cache-c5b6f6b8-587c-4b74-bc83-98dac319b15b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.945898] env[61629]: DEBUG nova.virt.hardware [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='e8e70c0b9e6a9ce4f9f59af6075d06a8',container_format='bare',created_at=2024-10-24T13:03:05Z,direct_url=,disk_format='vmdk',id=d1404c99-66c8-439f-b330-d6263aa8a1a3,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-375305816-shelved',owner='bd318d29ec50427eb997c83837120c9c',properties=ImageMetaProps,protected=,size=31665664,status='active',tags=,updated_at=2024-10-24T13:03:19Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1011.946181] env[61629]: DEBUG nova.virt.hardware [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1011.946344] env[61629]: DEBUG nova.virt.hardware [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1011.946537] env[61629]: DEBUG nova.virt.hardware [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1011.946689] env[61629]: DEBUG nova.virt.hardware [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1011.946840] env[61629]: DEBUG nova.virt.hardware [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1011.947138] env[61629]: DEBUG nova.virt.hardware [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1011.947315] env[61629]: DEBUG nova.virt.hardware [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1011.947489] env[61629]: DEBUG nova.virt.hardware [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1011.947655] env[61629]: DEBUG nova.virt.hardware [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1011.947830] env[61629]: DEBUG nova.virt.hardware [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1011.948713] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-849b69d1-830d-4325-a12e-584ab4209285 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.957085] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e9edfe-8c1b-4dbc-ba4d-40333f42ff34 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.971351] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:29:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '57805f12-9b81-4485-8f3a-32567ed40a8c', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1011.978711] env[61629]: DEBUG oslo.service.loopingcall [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1011.978998] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1011.979237] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-797b5c66-92d3-463b-acd0-02c187f4256d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.999385] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1011.999385] env[61629]: value = "task-1354551" [ 1011.999385] env[61629]: _type = "Task" [ 1011.999385] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.007447] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354551, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.081828] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5277c492-9012-146c-c109-f298279a31d2, 'name': SearchDatastore_Task, 'duration_secs': 0.009436} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.082767] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5717a52-35d9-4e1f-afff-63db5f98e0f3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.088421] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for the task: (returnval){ [ 1012.088421] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5284bfc8-c497-c2ea-76f3-6261afc65ca2" [ 1012.088421] env[61629]: _type = "Task" [ 1012.088421] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.097731] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5284bfc8-c497-c2ea-76f3-6261afc65ca2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.098236] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-3434f6a5-51c9-4351-9d7d-a37547947ec4 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1012.098459] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-34ba76bc-34c6-4045-a2de-04329757071e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.104665] env[61629]: DEBUG oslo_vmware.api [None req-3434f6a5-51c9-4351-9d7d-a37547947ec4 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1012.104665] env[61629]: value = "task-1354552" [ 1012.104665] env[61629]: _type = "Task" [ 1012.104665] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.112614] env[61629]: DEBUG oslo_vmware.api [None req-3434f6a5-51c9-4351-9d7d-a37547947ec4 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354552, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.229597] env[61629]: DEBUG oslo_vmware.api [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354550, 'name': PowerOffVM_Task, 'duration_secs': 0.223586} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.229772] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1012.229899] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1012.230096] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca1233a2-34aa-4107-8814-8032cbd1cd1a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.316550] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1012.316702] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1012.316907] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Deleting the datastore file [datastore1] 22f71f92-ca9a-4b97-a652-3f34a0dabde2 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1012.317724] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26177f62-d499-46ca-a88f-75e62e70e582 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.325698] env[61629]: DEBUG oslo_vmware.api [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1012.325698] env[61629]: value = "task-1354554" [ 1012.325698] env[61629]: _type = "Task" [ 1012.325698] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.334217] env[61629]: DEBUG oslo_vmware.api [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354554, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.510782] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354551, 'name': CreateVM_Task, 'duration_secs': 0.357729} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.510782] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1012.510992] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1404c99-66c8-439f-b330-d6263aa8a1a3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.511374] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1404c99-66c8-439f-b330-d6263aa8a1a3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.511761] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d1404c99-66c8-439f-b330-d6263aa8a1a3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1012.512023] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7878828-d2ac-456a-8829-1cb1db4627e8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.516145] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 1012.516145] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52e730a3-f49c-acf1-4140-c173a6844423" [ 1012.516145] env[61629]: _type = "Task" [ 1012.516145] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.523788] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52e730a3-f49c-acf1-4140-c173a6844423, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.598651] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5284bfc8-c497-c2ea-76f3-6261afc65ca2, 'name': SearchDatastore_Task, 'duration_secs': 0.010233} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.598926] env[61629]: DEBUG oslo_concurrency.lockutils [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.599261] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] a83f05b7-f998-4f45-afc1-836fae7c4b95/a83f05b7-f998-4f45-afc1-836fae7c4b95.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1012.599525] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f59b9d9-d655-4200-b034-43331a92a429 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.603763] env[61629]: DEBUG nova.compute.manager [req-3732a979-931e-4430-b319-eeeeba879ea7 req-7103ad03-8a8c-4496-9b09-975547c22c8b service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Received event network-changed-57805f12-9b81-4485-8f3a-32567ed40a8c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1012.603903] env[61629]: DEBUG nova.compute.manager [req-3732a979-931e-4430-b319-eeeeba879ea7 req-7103ad03-8a8c-4496-9b09-975547c22c8b service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Refreshing instance network info cache due to event network-changed-57805f12-9b81-4485-8f3a-32567ed40a8c. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1012.604134] env[61629]: DEBUG oslo_concurrency.lockutils [req-3732a979-931e-4430-b319-eeeeba879ea7 req-7103ad03-8a8c-4496-9b09-975547c22c8b service nova] Acquiring lock "refresh_cache-c5b6f6b8-587c-4b74-bc83-98dac319b15b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.604283] env[61629]: DEBUG oslo_concurrency.lockutils [req-3732a979-931e-4430-b319-eeeeba879ea7 req-7103ad03-8a8c-4496-9b09-975547c22c8b service nova] Acquired lock "refresh_cache-c5b6f6b8-587c-4b74-bc83-98dac319b15b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.604442] env[61629]: DEBUG nova.network.neutron [req-3732a979-931e-4430-b319-eeeeba879ea7 req-7103ad03-8a8c-4496-9b09-975547c22c8b service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Refreshing network info cache for port 57805f12-9b81-4485-8f3a-32567ed40a8c {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1012.610231] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for the task: (returnval){ [ 1012.610231] env[61629]: value = "task-1354555" [ 1012.610231] env[61629]: _type = "Task" [ 1012.610231] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.617577] env[61629]: DEBUG oslo_vmware.api [None req-3434f6a5-51c9-4351-9d7d-a37547947ec4 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354552, 'name': PowerOffVM_Task, 'duration_secs': 0.174051} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.618180] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-3434f6a5-51c9-4351-9d7d-a37547947ec4 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1012.618398] env[61629]: DEBUG nova.compute.manager [None req-3434f6a5-51c9-4351-9d7d-a37547947ec4 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1012.619433] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a4de81-f7b7-4b76-b9e6-e688ae1bb188 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.624984] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354555, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.837336] env[61629]: DEBUG oslo_vmware.api [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354554, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154512} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.837759] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1012.838056] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1012.838326] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1012.838579] env[61629]: INFO nova.compute.manager [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Took 1.14 seconds to destroy the instance on the hypervisor. 
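The surrounding entries show the task handling pattern end to end: a vCenter Task moref is returned, "Waiting for the task" is logged, progress is polled at an interval, and completion is reported together with duration_secs. The following is a minimal, self-contained Python sketch of that poll-until-terminal loop; get_task_info and its return shape are illustrative stand-ins, not the real vSphere bindings or the oslo_vmware implementation.

```python
import time

# Illustrative stand-in for fetching TaskInfo of a vCenter task; the real
# driver retrieves this through PropertyCollector calls wrapped by oslo_vmware.
def get_task_info(task_ref):
    get_task_info.calls += 1
    if get_task_info.calls < 3:
        return {"state": "running", "progress": get_task_info.calls * 40}
    return {"state": "success", "progress": 100}
get_task_info.calls = 0

def wait_for_task(task_ref, poll_interval=0.5, timeout=60.0):
    """Poll a task until it reaches a terminal state, mirroring the
    'Waiting for the task ... progress is N% ... completed successfully'
    sequence recorded in the entries above."""
    start = time.monotonic()
    while True:
        info = get_task_info(task_ref)
        if info["state"] == "success":
            return time.monotonic() - start      # analogue of duration_secs
        if info["state"] == "error":
            raise RuntimeError(f"task {task_ref} failed")
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"task {task_ref} still running after {timeout}s")
        print(f"Task {task_ref} progress is {info['progress']}%")
        time.sleep(poll_interval)

if __name__ == "__main__":
    print(f"completed successfully in {wait_for_task('task-1354551'):.3f}s")
```

The production code presumably also handles queued tasks and richer error details, but the loop structure is the same: read the task state, return or raise on a terminal state, otherwise sleep and poll again.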
[ 1012.838931] env[61629]: DEBUG oslo.service.loopingcall [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1012.839336] env[61629]: DEBUG nova.compute.manager [-] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1012.839465] env[61629]: DEBUG nova.network.neutron [-] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1013.029962] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1404c99-66c8-439f-b330-d6263aa8a1a3" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1013.030342] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Processing image d1404c99-66c8-439f-b330-d6263aa8a1a3 {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1013.030591] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d1404c99-66c8-439f-b330-d6263aa8a1a3/d1404c99-66c8-439f-b330-d6263aa8a1a3.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1013.030794] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d1404c99-66c8-439f-b330-d6263aa8a1a3/d1404c99-66c8-439f-b330-d6263aa8a1a3.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.030988] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1013.031289] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6241a06d-e9d8-47c4-9750-31ec6bc46e98 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.047707] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1013.047882] env[61629]: DEBUG 
nova.virt.vmwareapi.vmops [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1013.048761] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b714462-4e61-4ec0-bbd3-37907a5c660b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.058167] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 1013.058167] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52c2ce7d-e18a-66ac-adb3-0c9246b22dfc" [ 1013.058167] env[61629]: _type = "Task" [ 1013.058167] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.066853] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52c2ce7d-e18a-66ac-adb3-0c9246b22dfc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.120452] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354555, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482439} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.120736] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] a83f05b7-f998-4f45-afc1-836fae7c4b95/a83f05b7-f998-4f45-afc1-836fae7c4b95.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1013.120950] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1013.121230] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e98e1238-b9b6-4db5-a5f1-61307ed4fd70 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.129371] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for the task: (returnval){ [ 1013.129371] env[61629]: value = "task-1354556" [ 1013.129371] env[61629]: _type = "Task" [ 1013.129371] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.135313] env[61629]: DEBUG oslo_concurrency.lockutils [None req-3434f6a5-51c9-4351-9d7d-a37547947ec4 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "7da77dea-fea2-43a6-a98a-6c492d1a041b" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.550s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1013.140310] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354556, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.310195] env[61629]: DEBUG nova.network.neutron [req-3732a979-931e-4430-b319-eeeeba879ea7 req-7103ad03-8a8c-4496-9b09-975547c22c8b service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Updated VIF entry in instance network info cache for port 57805f12-9b81-4485-8f3a-32567ed40a8c. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1013.310573] env[61629]: DEBUG nova.network.neutron [req-3732a979-931e-4430-b319-eeeeba879ea7 req-7103ad03-8a8c-4496-9b09-975547c22c8b service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Updating instance_info_cache with network_info: [{"id": "57805f12-9b81-4485-8f3a-32567ed40a8c", "address": "fa:16:3e:ca:29:87", "network": {"id": "249c4ba3-38e0-421a-91b6-cf97f90eb535", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1700423127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd318d29ec50427eb997c83837120c9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57805f12-9b", "ovs_interfaceid": "57805f12-9b81-4485-8f3a-32567ed40a8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.568596] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Preparing fetch location {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1013.568937] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Fetch image to [datastore1] 
OSTACK_IMG_d0a3c23c-5ec8-4b1a-8295-238a8504da21/OSTACK_IMG_d0a3c23c-5ec8-4b1a-8295-238a8504da21.vmdk {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1013.569086] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Downloading stream optimized image d1404c99-66c8-439f-b330-d6263aa8a1a3 to [datastore1] OSTACK_IMG_d0a3c23c-5ec8-4b1a-8295-238a8504da21/OSTACK_IMG_d0a3c23c-5ec8-4b1a-8295-238a8504da21.vmdk on the data store datastore1 as vApp {{(pid=61629) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1013.569281] env[61629]: DEBUG nova.virt.vmwareapi.images [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Downloading image file data d1404c99-66c8-439f-b330-d6263aa8a1a3 to the ESX as VM named 'OSTACK_IMG_d0a3c23c-5ec8-4b1a-8295-238a8504da21' {{(pid=61629) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1013.605133] env[61629]: DEBUG nova.network.neutron [-] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.639962] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354556, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069212} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.640826] env[61629]: DEBUG oslo_vmware.rw_handles [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1013.640826] env[61629]: value = "resgroup-9" [ 1013.640826] env[61629]: _type = "ResourcePool" [ 1013.640826] env[61629]: }. 
{{(pid=61629) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1013.641146] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1013.641418] env[61629]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-25ca3d4e-f042-4250-ab2e-d64ed8920896 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.656666] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-748116ac-ca30-4c5e-9271-c6f860fddc85 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.677582] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] a83f05b7-f998-4f45-afc1-836fae7c4b95/a83f05b7-f998-4f45-afc1-836fae7c4b95.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1013.678935] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-285d3dcb-1e2d-4d5b-b5ca-783b958fcf6e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.693056] env[61629]: DEBUG oslo_vmware.rw_handles [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lease: (returnval){ [ 1013.693056] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52616ea4-a131-ea96-aae6-cc0aa6aac601" [ 1013.693056] env[61629]: _type = "HttpNfcLease" [ 1013.693056] env[61629]: } obtained for vApp import into resource pool (val){ [ 1013.693056] env[61629]: value = "resgroup-9" [ 1013.693056] env[61629]: _type = "ResourcePool" [ 1013.693056] env[61629]: }. {{(pid=61629) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1013.693371] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the lease: (returnval){ [ 1013.693371] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52616ea4-a131-ea96-aae6-cc0aa6aac601" [ 1013.693371] env[61629]: _type = "HttpNfcLease" [ 1013.693371] env[61629]: } to be ready. {{(pid=61629) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1013.701211] env[61629]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1013.701211] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52616ea4-a131-ea96-aae6-cc0aa6aac601" [ 1013.701211] env[61629]: _type = "HttpNfcLease" [ 1013.701211] env[61629]: } is initializing. 
{{(pid=61629) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1013.702792] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for the task: (returnval){ [ 1013.702792] env[61629]: value = "task-1354558" [ 1013.702792] env[61629]: _type = "Task" [ 1013.702792] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.714663] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354558, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.814358] env[61629]: DEBUG oslo_concurrency.lockutils [req-3732a979-931e-4430-b319-eeeeba879ea7 req-7103ad03-8a8c-4496-9b09-975547c22c8b service nova] Releasing lock "refresh_cache-c5b6f6b8-587c-4b74-bc83-98dac319b15b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1013.944869] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "7da77dea-fea2-43a6-a98a-6c492d1a041b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.945226] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "7da77dea-fea2-43a6-a98a-6c492d1a041b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.945474] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "7da77dea-fea2-43a6-a98a-6c492d1a041b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.945704] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "7da77dea-fea2-43a6-a98a-6c492d1a041b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.945906] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "7da77dea-fea2-43a6-a98a-6c492d1a041b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1013.949042] env[61629]: INFO nova.compute.manager [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Terminating instance [ 1013.950795] env[61629]: DEBUG nova.compute.manager [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1013.951018] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1013.951932] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c60c84-b04c-4f55-9ec7-1e3fa1968b7a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.963037] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1013.963315] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-683bcd16-6970-4320-9800-b75dd82bf671 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.039434] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1014.039883] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1014.040173] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Deleting the datastore file [datastore2] 7da77dea-fea2-43a6-a98a-6c492d1a041b {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1014.040507] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a5ebe21c-ff1f-4b0c-a37f-f74524740974 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.048529] env[61629]: DEBUG oslo_vmware.api [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1014.048529] env[61629]: value = "task-1354560" [ 
1014.048529] env[61629]: _type = "Task" [ 1014.048529] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.057664] env[61629]: DEBUG oslo_vmware.api [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354560, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.107978] env[61629]: INFO nova.compute.manager [-] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Took 1.27 seconds to deallocate network for instance. [ 1014.202667] env[61629]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1014.202667] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52616ea4-a131-ea96-aae6-cc0aa6aac601" [ 1014.202667] env[61629]: _type = "HttpNfcLease" [ 1014.202667] env[61629]: } is initializing. {{(pid=61629) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1014.213101] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354558, 'name': ReconfigVM_Task, 'duration_secs': 0.353632} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.213433] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Reconfigured VM instance instance-00000063 to attach disk [datastore1] a83f05b7-f998-4f45-afc1-836fae7c4b95/a83f05b7-f998-4f45-afc1-836fae7c4b95.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1014.214090] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-87d8cce4-2d41-4dfb-9fcc-50deb5127e9d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.221122] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for the task: (returnval){ [ 1014.221122] env[61629]: value = "task-1354561" [ 1014.221122] env[61629]: _type = "Task" [ 1014.221122] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.228935] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354561, 'name': Rename_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.558974] env[61629]: DEBUG oslo_vmware.api [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354560, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.266057} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.559333] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1014.559537] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1014.559713] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1014.559893] env[61629]: INFO nova.compute.manager [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1014.560179] env[61629]: DEBUG oslo.service.loopingcall [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1014.560400] env[61629]: DEBUG nova.compute.manager [-] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1014.560524] env[61629]: DEBUG nova.network.neutron [-] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1014.616918] env[61629]: DEBUG oslo_concurrency.lockutils [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.617298] env[61629]: DEBUG oslo_concurrency.lockutils [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.617564] env[61629]: DEBUG nova.objects.instance [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lazy-loading 'resources' on Instance uuid 22f71f92-ca9a-4b97-a652-3f34a0dabde2 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1014.626628] env[61629]: DEBUG nova.compute.manager [req-60a9dbe0-cec3-4c5c-b406-6c024cf91f4d req-cd82eef0-e60e-4192-8ae6-60f260dc8415 service nova] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Received event network-vif-deleted-5cbaf922-48cc-4c43-94b0-e00c9c88c48f {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1014.703306] env[61629]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1014.703306] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52616ea4-a131-ea96-aae6-cc0aa6aac601" [ 1014.703306] env[61629]: _type = "HttpNfcLease" [ 1014.703306] env[61629]: } is ready. {{(pid=61629) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1014.703607] env[61629]: DEBUG oslo_vmware.rw_handles [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1014.703607] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52616ea4-a131-ea96-aae6-cc0aa6aac601" [ 1014.703607] env[61629]: _type = "HttpNfcLease" [ 1014.703607] env[61629]: }. 
{{(pid=61629) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1014.704343] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91773f77-b6d3-4f0e-9d0b-cac2be30210c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.711568] env[61629]: DEBUG oslo_vmware.rw_handles [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a8b9a6-ac21-ad74-fd44-a694ea33c45a/disk-0.vmdk from lease info. {{(pid=61629) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1014.711750] env[61629]: DEBUG oslo_vmware.rw_handles [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Creating HTTP connection to write to file with size = 31665664 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a8b9a6-ac21-ad74-fd44-a694ea33c45a/disk-0.vmdk. {{(pid=61629) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1014.792111] env[61629]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-61e25d43-aa48-49c1-adfb-b801915e7346 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.794445] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354561, 'name': Rename_Task, 'duration_secs': 0.159562} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.794816] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1014.795562] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79ab4641-8823-4790-9c8b-ecbe80bc6b30 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.803907] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for the task: (returnval){ [ 1014.803907] env[61629]: value = "task-1354562" [ 1014.803907] env[61629]: _type = "Task" [ 1014.803907] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.811834] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354562, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.271697] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-703ae50f-bfe9-438f-8d9c-40abb955649f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.280313] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f0a6e19-35da-44c9-a5b7-ea8aed8c296a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.314259] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c05e75-bd9f-48a5-a146-02d010e807ce {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.325100] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a615281a-cdb5-4e61-a277-7ab4b2190e2a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.328686] env[61629]: DEBUG nova.network.neutron [-] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.329894] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354562, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.340490] env[61629]: DEBUG nova.compute.provider_tree [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1015.820380] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354562, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.831108] env[61629]: INFO nova.compute.manager [-] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Took 1.27 seconds to deallocate network for instance. 
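The ImportVApp/HttpNfcLease entries around this point trace the stream-optimized image fetch: a lease is requested for the vApp import, polled from "initializing" to "ready", the disk-0.vmdk URL is read from the lease info, the image bytes are written to that URL over HTTPS with the lease progress updated along the way, and the lease is released once the image iterator is exhausted. Below is a minimal Python sketch of the chunked write-with-progress part of that flow; FakeLease and the in-memory streams are illustrative stand-ins, not the oslo_vmware rw_handles implementation.

```python
import io

CHUNK_SIZE = 64 * 1024

class FakeLease:
    """Stand-in for the HttpNfcLease object; periodic progress updates are
    what keeps a real lease from timing out while data is being written."""
    def __init__(self):
        self.progress = 0
        self.state = "ready"

    def update_progress(self, percent):   # analogue of HttpNfcLeaseProgress
        self.progress = percent

    def complete(self):                   # analogue of completing/releasing the lease
        self.state = "done"

def upload_stream_optimized(image_stream, disk_url_stream, lease, total_size):
    """Copy image bytes to the lease-provided disk URL in fixed-size chunks,
    reporting progress as a percentage of total_size."""
    written = 0
    while True:
        chunk = image_stream.read(CHUNK_SIZE)
        if not chunk:
            break
        disk_url_stream.write(chunk)
        written += len(chunk)
        lease.update_progress(min(100, int(written * 100 / total_size)))
    lease.complete()
    return written

if __name__ == "__main__":
    payload = b"\0" * (256 * 1024)        # stand-in for the Glance image data
    lease = FakeLease()
    sent = upload_stream_optimized(io.BytesIO(payload), io.BytesIO(), lease, len(payload))
    print(f"wrote {sent} bytes, lease progress {lease.progress}%, state {lease.state}")
```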
[ 1015.843635] env[61629]: DEBUG nova.scheduler.client.report [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1016.321223] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354562, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.337273] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.348054] env[61629]: DEBUG oslo_concurrency.lockutils [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.731s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.350183] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.013s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.350411] env[61629]: DEBUG nova.objects.instance [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lazy-loading 'resources' on Instance uuid 7da77dea-fea2-43a6-a98a-6c492d1a041b {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1016.370793] env[61629]: INFO nova.scheduler.client.report [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Deleted allocations for instance 22f71f92-ca9a-4b97-a652-3f34a0dabde2 [ 1016.651873] env[61629]: DEBUG nova.compute.manager [req-df8ff5b4-50f5-4bec-9aa1-1f15fc6c97f6 req-9194db11-1791-4082-8c60-828619988822 service nova] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Received event network-vif-deleted-cb382e93-c231-4c57-bab2-1adf21156500 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1016.821262] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354562, 'name': 
PowerOnVM_Task} progress is 89%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.879028] env[61629]: DEBUG oslo_concurrency.lockutils [None req-abd26855-8513-4463-9722-d1579ecb5792 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "22f71f92-ca9a-4b97-a652-3f34a0dabde2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.182s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.961924] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ce9e2a-8501-4ad2-85e3-32c650346a16 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.969671] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a0c840c-84f1-4498-86ba-b3664651d85f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.000354] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a4b438-79fc-443a-8f90-ef77f49b037c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.007781] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af8fd37-f64b-40f1-914b-88dbdad243fd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.020695] env[61629]: DEBUG nova.compute.provider_tree [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1017.321295] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354562, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.525047] env[61629]: DEBUG nova.scheduler.client.report [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1017.637749] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "c99de956-c382-4203-b2a7-d3f8709d188a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.637989] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "c99de956-c382-4203-b2a7-d3f8709d188a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.823494] env[61629]: DEBUG oslo_vmware.api [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354562, 'name': PowerOnVM_Task, 'duration_secs': 2.959469} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.823883] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1017.824047] env[61629]: DEBUG nova.compute.manager [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1017.824858] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e76c13-ab9b-47f9-92e3-2e6f889489ae {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.029510] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.679s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.054694] env[61629]: INFO nova.scheduler.client.report [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Deleted allocations for instance 7da77dea-fea2-43a6-a98a-6c492d1a041b [ 1018.140900] env[61629]: DEBUG nova.compute.manager [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Starting instance... 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1018.344971] env[61629]: DEBUG oslo_concurrency.lockutils [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.345327] env[61629]: DEBUG oslo_concurrency.lockutils [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.345567] env[61629]: DEBUG nova.objects.instance [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61629) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1018.419414] env[61629]: DEBUG oslo_vmware.rw_handles [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Completed reading data from the image iterator. {{(pid=61629) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1018.419678] env[61629]: DEBUG oslo_vmware.rw_handles [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a8b9a6-ac21-ad74-fd44-a694ea33c45a/disk-0.vmdk. {{(pid=61629) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1018.420693] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f9dbf9-dd81-4337-b486-ffd0bcd662ac {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.428175] env[61629]: DEBUG oslo_vmware.rw_handles [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a8b9a6-ac21-ad74-fd44-a694ea33c45a/disk-0.vmdk is in state: ready. {{(pid=61629) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1018.428355] env[61629]: DEBUG oslo_vmware.rw_handles [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a8b9a6-ac21-ad74-fd44-a694ea33c45a/disk-0.vmdk. 
{{(pid=61629) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1018.428602] env[61629]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-27e4ce33-51ec-43ec-a288-8cfaf0edc8c2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.566821] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f61c068f-7bbe-4f0a-9b07-7861965c1659 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "7da77dea-fea2-43a6-a98a-6c492d1a041b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.621s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.662397] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.955405] env[61629]: DEBUG oslo_vmware.rw_handles [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a8b9a6-ac21-ad74-fd44-a694ea33c45a/disk-0.vmdk. {{(pid=61629) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1018.955737] env[61629]: INFO nova.virt.vmwareapi.images [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Downloaded image file data d1404c99-66c8-439f-b330-d6263aa8a1a3 [ 1018.956576] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677a8688-6663-4662-8963-f1585664625e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.971713] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-275d0b07-cf6c-4077-9484-f4633310df19 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.993464] env[61629]: DEBUG oslo_concurrency.lockutils [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Acquiring lock "a83f05b7-f998-4f45-afc1-836fae7c4b95" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.993766] env[61629]: DEBUG oslo_concurrency.lockutils [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Lock "a83f05b7-f998-4f45-afc1-836fae7c4b95" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.993987] env[61629]: DEBUG oslo_concurrency.lockutils [None req-38009b65-3665-4833-b66d-74fdc9f27a0e 
tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Acquiring lock "a83f05b7-f998-4f45-afc1-836fae7c4b95-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.994233] env[61629]: DEBUG oslo_concurrency.lockutils [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Lock "a83f05b7-f998-4f45-afc1-836fae7c4b95-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.994465] env[61629]: DEBUG oslo_concurrency.lockutils [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Lock "a83f05b7-f998-4f45-afc1-836fae7c4b95-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.996668] env[61629]: INFO nova.compute.manager [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Terminating instance [ 1018.998301] env[61629]: DEBUG oslo_concurrency.lockutils [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Acquiring lock "refresh_cache-a83f05b7-f998-4f45-afc1-836fae7c4b95" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.998472] env[61629]: DEBUG oslo_concurrency.lockutils [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Acquired lock "refresh_cache-a83f05b7-f998-4f45-afc1-836fae7c4b95" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.998642] env[61629]: DEBUG nova.network.neutron [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1019.168055] env[61629]: INFO nova.virt.vmwareapi.images [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] The imported VM was unregistered [ 1019.168560] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Caching image {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1019.168926] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base/d1404c99-66c8-439f-b330-d6263aa8a1a3 {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1019.169390] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ec29e8a-62c5-4e37-9e49-b52de327e89f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.189372] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Created directory with path [datastore1] devstack-image-cache_base/d1404c99-66c8-439f-b330-d6263aa8a1a3 {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1019.189603] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_d0a3c23c-5ec8-4b1a-8295-238a8504da21/OSTACK_IMG_d0a3c23c-5ec8-4b1a-8295-238a8504da21.vmdk to [datastore1] devstack-image-cache_base/d1404c99-66c8-439f-b330-d6263aa8a1a3/d1404c99-66c8-439f-b330-d6263aa8a1a3.vmdk. {{(pid=61629) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1019.189804] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-8721431e-d662-4c05-b23b-7bb1e4991cb3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.196117] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 1019.196117] env[61629]: value = "task-1354564" [ 1019.196117] env[61629]: _type = "Task" [ 1019.196117] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.203779] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354564, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.353934] env[61629]: DEBUG oslo_concurrency.lockutils [None req-505a79ea-dca2-4acc-ad35-ee58555a0e49 tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.355194] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.693s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.356805] env[61629]: INFO nova.compute.claims [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1019.516071] env[61629]: DEBUG nova.network.neutron [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1019.563557] env[61629]: DEBUG nova.network.neutron [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.707366] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354564, 'name': MoveVirtualDisk_Task} progress is 12%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.863711] env[61629]: DEBUG oslo_concurrency.lockutils [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "3b116c59-a904-4b68-9c74-58954b3de240" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.863967] env[61629]: DEBUG oslo_concurrency.lockutils [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "3b116c59-a904-4b68-9c74-58954b3de240" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.066676] env[61629]: DEBUG oslo_concurrency.lockutils [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Releasing lock "refresh_cache-a83f05b7-f998-4f45-afc1-836fae7c4b95" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.067091] env[61629]: DEBUG nova.compute.manager [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1020.067332] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1020.068272] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d56f23f-a4ef-4dd3-ad75-6003e3e6fba5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.078233] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1020.078512] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dbee0fca-5247-430e-82f4-39f4d3f770c6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.087459] env[61629]: DEBUG oslo_vmware.api [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for the task: (returnval){ [ 1020.087459] env[61629]: value = "task-1354565" [ 1020.087459] env[61629]: _type = "Task" [ 1020.087459] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.097779] env[61629]: DEBUG oslo_vmware.api [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354565, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.208972] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354564, 'name': MoveVirtualDisk_Task} progress is 35%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.368665] env[61629]: DEBUG nova.compute.manager [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1020.506423] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e845eac0-27fb-4acf-b305-be62121023f8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.516806] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d6c36b9-4102-4430-aaa4-1f51f9c5b627 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.551883] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3347f342-bc54-42c2-a57c-d5ccb4b76cac {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.562361] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8f71011-ac24-4747-b0e9-59161f7897e2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.579158] env[61629]: DEBUG nova.compute.provider_tree [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1020.600146] env[61629]: DEBUG oslo_vmware.api [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354565, 'name': PowerOffVM_Task} progress is 100%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.709622] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354564, 'name': MoveVirtualDisk_Task} progress is 57%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.891619] env[61629]: DEBUG oslo_concurrency.lockutils [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.083408] env[61629]: DEBUG nova.scheduler.client.report [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1021.108184] env[61629]: DEBUG oslo_vmware.api [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354565, 'name': PowerOffVM_Task, 'duration_secs': 0.538102} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.109223] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1021.109453] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1021.109744] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-769ce695-f743-4fa6-86b3-b970cd3d5e86 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.139977] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1021.140204] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1021.140461] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] 
Deleting the datastore file [datastore1] a83f05b7-f998-4f45-afc1-836fae7c4b95 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1021.140802] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8be8e088-0982-45b8-a92e-ccfccab1a066 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.150797] env[61629]: DEBUG oslo_vmware.api [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for the task: (returnval){ [ 1021.150797] env[61629]: value = "task-1354567" [ 1021.150797] env[61629]: _type = "Task" [ 1021.150797] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.163065] env[61629]: DEBUG oslo_vmware.api [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354567, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.210604] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354564, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.600067] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.245s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.600663] env[61629]: DEBUG nova.compute.manager [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1021.603989] env[61629]: DEBUG oslo_concurrency.lockutils [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.713s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.605797] env[61629]: INFO nova.compute.claims [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1021.663416] env[61629]: DEBUG oslo_vmware.api [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354567, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.715248] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354564, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.110467] env[61629]: DEBUG nova.compute.utils [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1022.113968] env[61629]: DEBUG nova.compute.manager [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1022.113968] env[61629]: DEBUG nova.network.neutron [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1022.162448] env[61629]: DEBUG oslo_vmware.api [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Task: {'id': task-1354567, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.863652} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.163883] env[61629]: DEBUG nova.policy [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c62f9a7c8b5f4ef985880339407b46a1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0578ce75c37942d4ba6c8b862ceb7d92', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 1022.165296] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1022.165499] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1022.165691] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Instance 
destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1022.165865] env[61629]: INFO nova.compute.manager [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Took 2.10 seconds to destroy the instance on the hypervisor. [ 1022.166127] env[61629]: DEBUG oslo.service.loopingcall [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1022.166343] env[61629]: DEBUG nova.compute.manager [-] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1022.166439] env[61629]: DEBUG nova.network.neutron [-] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1022.188666] env[61629]: DEBUG nova.network.neutron [-] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1022.211787] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354564, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.836302} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.212126] env[61629]: INFO nova.virt.vmwareapi.ds_util [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_d0a3c23c-5ec8-4b1a-8295-238a8504da21/OSTACK_IMG_d0a3c23c-5ec8-4b1a-8295-238a8504da21.vmdk to [datastore1] devstack-image-cache_base/d1404c99-66c8-439f-b330-d6263aa8a1a3/d1404c99-66c8-439f-b330-d6263aa8a1a3.vmdk. 
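(Illustrative sketch, not part of the captured log.) The recurring "Acquiring lock ... / Lock ... acquired ... waited / Lock ... released ... held" DEBUG lines throughout this trace are emitted by the wrapper that oslo.concurrency's lockutils places around synchronized code paths, such as the "compute_resources" lock the resource tracker holds above. A minimal reproduction, assuming only that oslo.concurrency is installed and DEBUG logging is enabled; the function name below is a hypothetical stand-in, not a Nova API:

```python
import logging
import time

from oslo_concurrency import lockutils

# Enable DEBUG logging so the lockutils wrapper's acquire/release messages,
# analogous to the ones in the trace above, are actually printed.
logging.basicConfig(level=logging.DEBUG)


@lockutils.synchronized("compute_resources")  # lock name mirrors the log
def claim_resources():
    # While this sleeps, any other caller synchronized on the same lock name
    # blocks; that blocked time is what the "waited N.NNNs" figures measure.
    time.sleep(0.1)


claim_resources()
```

The "waited"/"held" durations in the log are measured by that same wrapper, which is why long "held" times on "compute_resources" (e.g. the 2.245s instance_claim above) directly delay the claims queued behind it.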
[ 1022.212389] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Cleaning up location [datastore1] OSTACK_IMG_d0a3c23c-5ec8-4b1a-8295-238a8504da21 {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1022.212602] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_d0a3c23c-5ec8-4b1a-8295-238a8504da21 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1022.212878] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-363f76a4-600a-4f9b-a161-260f079ce36b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.220042] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 1022.220042] env[61629]: value = "task-1354568" [ 1022.220042] env[61629]: _type = "Task" [ 1022.220042] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.228201] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354568, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.425583] env[61629]: DEBUG nova.network.neutron [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Successfully created port: 740d1d45-2daa-44df-ac77-406865a8b131 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1022.615258] env[61629]: DEBUG nova.compute.manager [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1022.691572] env[61629]: DEBUG nova.network.neutron [-] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.731828] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354568, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.037853} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.732165] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1022.732348] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d1404c99-66c8-439f-b330-d6263aa8a1a3/d1404c99-66c8-439f-b330-d6263aa8a1a3.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1022.732602] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d1404c99-66c8-439f-b330-d6263aa8a1a3/d1404c99-66c8-439f-b330-d6263aa8a1a3.vmdk to [datastore1] c5b6f6b8-587c-4b74-bc83-98dac319b15b/c5b6f6b8-587c-4b74-bc83-98dac319b15b.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1022.732860] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f66d006d-3cbc-447d-b5fa-252e44c41dc0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.742000] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 1022.742000] env[61629]: value = "task-1354569" [ 1022.742000] env[61629]: _type = "Task" [ 1022.742000] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.746539] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c89636af-e0a4-40cc-a6a4-af658b26938c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.754061] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354569, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.756864] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abba5fa1-9a91-4a5c-90a2-2cd0a7d854a7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.788031] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64df2d61-40ed-4b99-bb94-4a662d5f3246 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.795590] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a89280-4862-4603-b205-2e3c96961c3b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.810396] env[61629]: DEBUG nova.compute.provider_tree [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.194538] env[61629]: INFO nova.compute.manager [-] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Took 1.03 seconds to deallocate network for instance. [ 1023.254682] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354569, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.314686] env[61629]: DEBUG nova.scheduler.client.report [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1023.628075] env[61629]: DEBUG nova.compute.manager [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1023.658387] env[61629]: DEBUG nova.virt.hardware [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1023.658674] env[61629]: DEBUG nova.virt.hardware [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1023.658839] env[61629]: DEBUG nova.virt.hardware [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1023.659041] env[61629]: DEBUG nova.virt.hardware [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1023.659210] env[61629]: DEBUG nova.virt.hardware [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1023.659468] env[61629]: DEBUG nova.virt.hardware [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1023.659694] env[61629]: DEBUG nova.virt.hardware [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1023.659859] env[61629]: DEBUG nova.virt.hardware [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1023.660044] env[61629]: DEBUG nova.virt.hardware [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 
tempest-ServersTestJSON-1460186850-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1023.660219] env[61629]: DEBUG nova.virt.hardware [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1023.660631] env[61629]: DEBUG nova.virt.hardware [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1023.661549] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa6c8bf-eb34-4222-a9f3-c1f32fe84746 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.672969] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-245c28e6-4ce8-4f54-8f7a-64a4cba7d907 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.701725] env[61629]: DEBUG oslo_concurrency.lockutils [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.754078] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354569, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.822420] env[61629]: DEBUG oslo_concurrency.lockutils [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.218s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.822986] env[61629]: DEBUG nova.compute.manager [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1023.825726] env[61629]: DEBUG oslo_concurrency.lockutils [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.124s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.825968] env[61629]: DEBUG nova.objects.instance [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Lazy-loading 'resources' on Instance uuid a83f05b7-f998-4f45-afc1-836fae7c4b95 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1024.096760] env[61629]: DEBUG nova.compute.manager [req-ee8da84d-4ab6-4f8a-adbf-37edd6bf2a86 req-64ece543-ee11-4d99-b98b-fa0121aa468f service nova] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Received event network-vif-plugged-740d1d45-2daa-44df-ac77-406865a8b131 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1024.097060] env[61629]: DEBUG oslo_concurrency.lockutils [req-ee8da84d-4ab6-4f8a-adbf-37edd6bf2a86 req-64ece543-ee11-4d99-b98b-fa0121aa468f service nova] Acquiring lock "c99de956-c382-4203-b2a7-d3f8709d188a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.097273] env[61629]: DEBUG oslo_concurrency.lockutils [req-ee8da84d-4ab6-4f8a-adbf-37edd6bf2a86 req-64ece543-ee11-4d99-b98b-fa0121aa468f service nova] Lock "c99de956-c382-4203-b2a7-d3f8709d188a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.097453] env[61629]: DEBUG oslo_concurrency.lockutils [req-ee8da84d-4ab6-4f8a-adbf-37edd6bf2a86 req-64ece543-ee11-4d99-b98b-fa0121aa468f service nova] Lock "c99de956-c382-4203-b2a7-d3f8709d188a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.097613] env[61629]: DEBUG nova.compute.manager [req-ee8da84d-4ab6-4f8a-adbf-37edd6bf2a86 req-64ece543-ee11-4d99-b98b-fa0121aa468f service nova] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] No waiting events found dispatching network-vif-plugged-740d1d45-2daa-44df-ac77-406865a8b131 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1024.097788] env[61629]: WARNING nova.compute.manager [req-ee8da84d-4ab6-4f8a-adbf-37edd6bf2a86 req-64ece543-ee11-4d99-b98b-fa0121aa468f service nova] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Received unexpected event network-vif-plugged-740d1d45-2daa-44df-ac77-406865a8b131 for instance with vm_state building and task_state spawning. 
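(Worked example, not part of the captured log.) The inventory payloads reported for provider d075eff1-6f77-44a8-824e-16f3e03b4063 in the scheduler report lines can be read as schedulable capacity using the standard Placement formula (total - reserved) * allocation_ratio; the numbers below are copied from this log, while the formula itself comes from Placement's documented behaviour rather than anything stated in the trace:

```python
# Inventory data copied from the report lines in this log
# (min_unit/max_unit/step_size omitted for brevity).
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    # Placement allocates against (total - reserved) * allocation_ratio
    # for each resource class.
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g} schedulable units")
# Expected output: VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```

With an allocation_ratio of 4.0, the 48 reported VCPUs allow up to 192 vCPUs worth of claims on this node, which is why the back-to-back instance_claim calls in this trace all succeed.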
[ 1024.196735] env[61629]: DEBUG nova.network.neutron [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Successfully updated port: 740d1d45-2daa-44df-ac77-406865a8b131 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1024.255092] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354569, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.329755] env[61629]: DEBUG nova.compute.utils [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1024.334224] env[61629]: DEBUG nova.compute.manager [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1024.334419] env[61629]: DEBUG nova.network.neutron [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1024.375852] env[61629]: DEBUG nova.policy [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec39705b9dd24915a0b3723ea45a85d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38efdd2cc07f45a49fb06d590aafb96b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 1024.464269] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71ecba6a-e985-4bea-9028-bd8a618edb0c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.475864] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e336375-ca2e-43a1-9040-06e5894e3215 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.508568] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d6bf06-5407-498b-9627-c43329c538e6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.517302] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aff19f7-e638-4c40-beb0-a8edc6a0648e {{(pid=61629) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.532728] env[61629]: DEBUG nova.compute.provider_tree [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1024.682784] env[61629]: DEBUG nova.network.neutron [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Successfully created port: 2ca66e03-30d5-4fcf-b92c-0f5ea8e7ac07 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1024.702855] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "refresh_cache-c99de956-c382-4203-b2a7-d3f8709d188a" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.702855] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired lock "refresh_cache-c99de956-c382-4203-b2a7-d3f8709d188a" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.702855] env[61629]: DEBUG nova.network.neutron [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1024.757662] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354569, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.835253] env[61629]: DEBUG nova.compute.manager [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1025.036559] env[61629]: DEBUG nova.scheduler.client.report [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1025.237283] env[61629]: DEBUG nova.network.neutron [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Instance cache missing network info. {{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1025.254937] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354569, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.231503} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.257219] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d1404c99-66c8-439f-b330-d6263aa8a1a3/d1404c99-66c8-439f-b330-d6263aa8a1a3.vmdk to [datastore1] c5b6f6b8-587c-4b74-bc83-98dac319b15b/c5b6f6b8-587c-4b74-bc83-98dac319b15b.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1025.258144] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec775e5-90a8-49ab-b399-b9907e8702e2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.281498] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] c5b6f6b8-587c-4b74-bc83-98dac319b15b/c5b6f6b8-587c-4b74-bc83-98dac319b15b.vmdk or device None with type streamOptimized {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1025.284028] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51e0d4a0-c674-4bd9-bb3e-97483374438b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.305097] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 1025.305097] env[61629]: value = "task-1354570" [ 1025.305097] env[61629]: _type = "Task" [ 1025.305097] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.312662] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354570, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.387659] env[61629]: DEBUG nova.network.neutron [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Updating instance_info_cache with network_info: [{"id": "740d1d45-2daa-44df-ac77-406865a8b131", "address": "fa:16:3e:1c:52:d6", "network": {"id": "c1b68401-68d1-48c7-b118-722070249876", "bridge": "br-int", "label": "tempest-ServersTestJSON-1738845000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0578ce75c37942d4ba6c8b862ceb7d92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap740d1d45-2d", "ovs_interfaceid": "740d1d45-2daa-44df-ac77-406865a8b131", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.541424] env[61629]: DEBUG oslo_concurrency.lockutils [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.716s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.563020] env[61629]: INFO nova.scheduler.client.report [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Deleted allocations for instance a83f05b7-f998-4f45-afc1-836fae7c4b95 [ 1025.814131] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354570, 'name': ReconfigVM_Task, 'duration_secs': 0.299741} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.814469] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Reconfigured VM instance instance-00000057 to attach disk [datastore1] c5b6f6b8-587c-4b74-bc83-98dac319b15b/c5b6f6b8-587c-4b74-bc83-98dac319b15b.vmdk or device None with type streamOptimized {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1025.815088] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-013c4280-8a5d-48e5-a523-df764b345458 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.821419] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 1025.821419] env[61629]: value = "task-1354571" [ 1025.821419] env[61629]: _type = "Task" [ 1025.821419] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.828877] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354571, 'name': Rename_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.844119] env[61629]: DEBUG nova.compute.manager [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1025.869183] env[61629]: DEBUG nova.virt.hardware [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1025.869479] env[61629]: DEBUG nova.virt.hardware [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1025.869649] env[61629]: DEBUG nova.virt.hardware [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1025.869836] env[61629]: DEBUG nova.virt.hardware [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1025.869989] env[61629]: DEBUG nova.virt.hardware [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1025.870156] env[61629]: DEBUG nova.virt.hardware [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1025.870371] env[61629]: DEBUG nova.virt.hardware [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1025.870535] env[61629]: DEBUG nova.virt.hardware [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1025.870705] env[61629]: DEBUG nova.virt.hardware [None 
req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1025.870871] env[61629]: DEBUG nova.virt.hardware [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1025.871080] env[61629]: DEBUG nova.virt.hardware [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1025.871927] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361bf981-05f9-4585-ab84-c7f6f2281830 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.879589] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-363e0253-f3da-474b-98aa-b78aaa4050c9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.892953] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Releasing lock "refresh_cache-c99de956-c382-4203-b2a7-d3f8709d188a" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1025.893295] env[61629]: DEBUG nova.compute.manager [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Instance network_info: |[{"id": "740d1d45-2daa-44df-ac77-406865a8b131", "address": "fa:16:3e:1c:52:d6", "network": {"id": "c1b68401-68d1-48c7-b118-722070249876", "bridge": "br-int", "label": "tempest-ServersTestJSON-1738845000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0578ce75c37942d4ba6c8b862ceb7d92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap740d1d45-2d", "ovs_interfaceid": "740d1d45-2daa-44df-ac77-406865a8b131", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1025.893834] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 
tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:52:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba866c99-1cb2-4588-9f76-4bc0421ed46a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '740d1d45-2daa-44df-ac77-406865a8b131', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1025.901176] env[61629]: DEBUG oslo.service.loopingcall [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1025.901382] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1025.901592] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f2437aa-284d-407d-9714-454a766e3942 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.921715] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1025.921715] env[61629]: value = "task-1354572" [ 1025.921715] env[61629]: _type = "Task" [ 1025.921715] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.930462] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354572, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.071129] env[61629]: DEBUG oslo_concurrency.lockutils [None req-38009b65-3665-4833-b66d-74fdc9f27a0e tempest-ServerShowV257Test-2001617408 tempest-ServerShowV257Test-2001617408-project-member] Lock "a83f05b7-f998-4f45-afc1-836fae7c4b95" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.077s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.125866] env[61629]: DEBUG nova.compute.manager [req-97cf070d-e203-4085-b23f-ba16c80aad09 req-c0cd1a19-8524-403d-a8d0-6c397a80ca58 service nova] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Received event network-changed-740d1d45-2daa-44df-ac77-406865a8b131 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1026.126286] env[61629]: DEBUG nova.compute.manager [req-97cf070d-e203-4085-b23f-ba16c80aad09 req-c0cd1a19-8524-403d-a8d0-6c397a80ca58 service nova] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Refreshing instance network info cache due to event network-changed-740d1d45-2daa-44df-ac77-406865a8b131. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1026.126383] env[61629]: DEBUG oslo_concurrency.lockutils [req-97cf070d-e203-4085-b23f-ba16c80aad09 req-c0cd1a19-8524-403d-a8d0-6c397a80ca58 service nova] Acquiring lock "refresh_cache-c99de956-c382-4203-b2a7-d3f8709d188a" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.126451] env[61629]: DEBUG oslo_concurrency.lockutils [req-97cf070d-e203-4085-b23f-ba16c80aad09 req-c0cd1a19-8524-403d-a8d0-6c397a80ca58 service nova] Acquired lock "refresh_cache-c99de956-c382-4203-b2a7-d3f8709d188a" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.126660] env[61629]: DEBUG nova.network.neutron [req-97cf070d-e203-4085-b23f-ba16c80aad09 req-c0cd1a19-8524-403d-a8d0-6c397a80ca58 service nova] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Refreshing network info cache for port 740d1d45-2daa-44df-ac77-406865a8b131 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1026.286093] env[61629]: DEBUG nova.network.neutron [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Successfully updated port: 2ca66e03-30d5-4fcf-b92c-0f5ea8e7ac07 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1026.330594] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354571, 'name': Rename_Task, 'duration_secs': 0.139804} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.330894] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1026.331155] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-98b37d9d-7e0c-468e-b7be-aefd9b2ee10c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.336961] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 1026.336961] env[61629]: value = "task-1354573" [ 1026.336961] env[61629]: _type = "Task" [ 1026.336961] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.350051] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354573, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.432084] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354572, 'name': CreateVM_Task, 'duration_secs': 0.355327} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.432222] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1026.432964] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.433159] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.433498] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1026.433762] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fb56106-5bb4-4933-8311-648eddb87f24 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.438159] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1026.438159] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]525f0a5b-049f-b999-1090-c1630cb15908" [ 1026.438159] env[61629]: _type = "Task" [ 1026.438159] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.445766] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]525f0a5b-049f-b999-1090-c1630cb15908, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.790302] env[61629]: DEBUG oslo_concurrency.lockutils [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "refresh_cache-3b116c59-a904-4b68-9c74-58954b3de240" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.790442] env[61629]: DEBUG oslo_concurrency.lockutils [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired lock "refresh_cache-3b116c59-a904-4b68-9c74-58954b3de240" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.790646] env[61629]: DEBUG nova.network.neutron [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1026.832385] env[61629]: DEBUG nova.network.neutron [req-97cf070d-e203-4085-b23f-ba16c80aad09 req-c0cd1a19-8524-403d-a8d0-6c397a80ca58 service nova] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Updated VIF entry in instance network info cache for port 740d1d45-2daa-44df-ac77-406865a8b131. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1026.832722] env[61629]: DEBUG nova.network.neutron [req-97cf070d-e203-4085-b23f-ba16c80aad09 req-c0cd1a19-8524-403d-a8d0-6c397a80ca58 service nova] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Updating instance_info_cache with network_info: [{"id": "740d1d45-2daa-44df-ac77-406865a8b131", "address": "fa:16:3e:1c:52:d6", "network": {"id": "c1b68401-68d1-48c7-b118-722070249876", "bridge": "br-int", "label": "tempest-ServersTestJSON-1738845000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0578ce75c37942d4ba6c8b862ceb7d92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap740d1d45-2d", "ovs_interfaceid": "740d1d45-2daa-44df-ac77-406865a8b131", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.847546] env[61629]: DEBUG oslo_vmware.api [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354573, 'name': PowerOnVM_Task, 'duration_secs': 0.459387} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.847829] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1026.949021] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]525f0a5b-049f-b999-1090-c1630cb15908, 'name': SearchDatastore_Task, 'duration_secs': 0.01556} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.949460] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1026.949720] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1026.949956] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.950557] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.950557] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1026.951380] env[61629]: DEBUG nova.compute.manager [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1026.951547] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-47629cf1-c0d8-4f9b-97f2-f43bbcbb52b4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.953851] env[61629]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13559351-872b-44c8-ab25-457801999596 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.963937] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1026.964136] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1026.964998] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-498ddab3-5d04-4e85-9cf3-8fba27d17562 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.969758] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1026.969758] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52fd7af5-6cb0-573e-f457-a72a84d34d86" [ 1026.969758] env[61629]: _type = "Task" [ 1026.969758] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.978304] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52fd7af5-6cb0-573e-f457-a72a84d34d86, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.335427] env[61629]: DEBUG oslo_concurrency.lockutils [req-97cf070d-e203-4085-b23f-ba16c80aad09 req-c0cd1a19-8524-403d-a8d0-6c397a80ca58 service nova] Releasing lock "refresh_cache-c99de956-c382-4203-b2a7-d3f8709d188a" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.336689] env[61629]: DEBUG nova.network.neutron [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1027.475101] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1ce60b1c-4a13-460f-ac24-6d172c6727db tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 23.279s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.490138] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52fd7af5-6cb0-573e-f457-a72a84d34d86, 'name': SearchDatastore_Task, 'duration_secs': 0.013158} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.491074] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25c4976f-1640-46fb-82a6-6d4ccc7753a3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.498686] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1027.498686] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]529abeac-cf82-e7b4-1468-5ff36a0e7970" [ 1027.498686] env[61629]: _type = "Task" [ 1027.498686] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.503342] env[61629]: DEBUG nova.network.neutron [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Updating instance_info_cache with network_info: [{"id": "2ca66e03-30d5-4fcf-b92c-0f5ea8e7ac07", "address": "fa:16:3e:5c:ec:35", "network": {"id": "03610486-2741-491e-a62d-a51579315e5a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1394073503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38efdd2cc07f45a49fb06d590aafb96b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ca66e03-30", "ovs_interfaceid": "2ca66e03-30d5-4fcf-b92c-0f5ea8e7ac07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.512120] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 
tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]529abeac-cf82-e7b4-1468-5ff36a0e7970, 'name': SearchDatastore_Task, 'duration_secs': 0.010293} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.512757] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.512845] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] c99de956-c382-4203-b2a7-d3f8709d188a/c99de956-c382-4203-b2a7-d3f8709d188a.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1027.513398] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-65703522-998d-48d4-970b-2a047ec85dcd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.519836] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1027.519836] env[61629]: value = "task-1354574" [ 1027.519836] env[61629]: _type = "Task" [ 1027.519836] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.528098] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354574, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.007055] env[61629]: DEBUG oslo_concurrency.lockutils [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Releasing lock "refresh_cache-3b116c59-a904-4b68-9c74-58954b3de240" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1028.007369] env[61629]: DEBUG nova.compute.manager [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Instance network_info: |[{"id": "2ca66e03-30d5-4fcf-b92c-0f5ea8e7ac07", "address": "fa:16:3e:5c:ec:35", "network": {"id": "03610486-2741-491e-a62d-a51579315e5a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1394073503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38efdd2cc07f45a49fb06d590aafb96b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ca66e03-30", "ovs_interfaceid": "2ca66e03-30d5-4fcf-b92c-0f5ea8e7ac07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1028.007817] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:ec:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd88b750a-0e7d-4f16-8bd5-8e6d5743b720', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2ca66e03-30d5-4fcf-b92c-0f5ea8e7ac07', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1028.016481] env[61629]: DEBUG oslo.service.loopingcall [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1028.016729] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1028.016968] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dadc4b7d-02a2-4e4d-a00b-a6b10421d429 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.036733] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Acquiring lock "0daebf05-e42b-49c5-aa24-43304a1c3cc0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.036733] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Lock "0daebf05-e42b-49c5-aa24-43304a1c3cc0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.036733] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Acquiring lock "0daebf05-e42b-49c5-aa24-43304a1c3cc0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.038313] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Lock "0daebf05-e42b-49c5-aa24-43304a1c3cc0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.038313] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Lock "0daebf05-e42b-49c5-aa24-43304a1c3cc0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.040249] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1028.040249] env[61629]: value = "task-1354575" [ 1028.040249] env[61629]: _type = "Task" [ 1028.040249] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.046507] env[61629]: INFO nova.compute.manager [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Terminating instance [ 1028.048204] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354574, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476117} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.048656] env[61629]: DEBUG nova.compute.manager [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1028.048854] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1028.049482] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] c99de956-c382-4203-b2a7-d3f8709d188a/c99de956-c382-4203-b2a7-d3f8709d188a.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1028.049709] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1028.050521] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19528063-de99-45b4-91e5-36331c3c3064 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.056332] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0c7c67f4-a93f-42af-a844-f87d16cd6e92 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.058318] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354575, 'name': CreateVM_Task} progress is 5%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.063276] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1028.064370] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a3e1ecd-4f62-4063-a4f1-d1021bba17d4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.065831] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1028.065831] env[61629]: value = "task-1354576" [ 1028.065831] env[61629]: _type = "Task" [ 1028.065831] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.073984] env[61629]: DEBUG oslo_vmware.api [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Waiting for the task: (returnval){ [ 1028.073984] env[61629]: value = "task-1354577" [ 1028.073984] env[61629]: _type = "Task" [ 1028.073984] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.077589] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354576, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.085297] env[61629]: DEBUG oslo_vmware.api [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Task: {'id': task-1354577, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.228089] env[61629]: DEBUG nova.compute.manager [req-5d9f9cd3-0d59-4c8b-acdf-2fe4dc661c88 req-c20a3a6b-f87e-49fe-9547-25615ac574ab service nova] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Received event network-vif-plugged-2ca66e03-30d5-4fcf-b92c-0f5ea8e7ac07 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1028.228322] env[61629]: DEBUG oslo_concurrency.lockutils [req-5d9f9cd3-0d59-4c8b-acdf-2fe4dc661c88 req-c20a3a6b-f87e-49fe-9547-25615ac574ab service nova] Acquiring lock "3b116c59-a904-4b68-9c74-58954b3de240-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.228530] env[61629]: DEBUG oslo_concurrency.lockutils [req-5d9f9cd3-0d59-4c8b-acdf-2fe4dc661c88 req-c20a3a6b-f87e-49fe-9547-25615ac574ab service nova] Lock "3b116c59-a904-4b68-9c74-58954b3de240-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.228697] env[61629]: DEBUG oslo_concurrency.lockutils [req-5d9f9cd3-0d59-4c8b-acdf-2fe4dc661c88 req-c20a3a6b-f87e-49fe-9547-25615ac574ab service nova] Lock "3b116c59-a904-4b68-9c74-58954b3de240-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.228929] env[61629]: DEBUG nova.compute.manager [req-5d9f9cd3-0d59-4c8b-acdf-2fe4dc661c88 req-c20a3a6b-f87e-49fe-9547-25615ac574ab service nova] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] No waiting events found dispatching network-vif-plugged-2ca66e03-30d5-4fcf-b92c-0f5ea8e7ac07 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1028.229034] env[61629]: WARNING nova.compute.manager [req-5d9f9cd3-0d59-4c8b-acdf-2fe4dc661c88 req-c20a3a6b-f87e-49fe-9547-25615ac574ab service nova] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Received unexpected event network-vif-plugged-2ca66e03-30d5-4fcf-b92c-0f5ea8e7ac07 for instance with vm_state building and task_state spawning. [ 1028.229206] env[61629]: DEBUG nova.compute.manager [req-5d9f9cd3-0d59-4c8b-acdf-2fe4dc661c88 req-c20a3a6b-f87e-49fe-9547-25615ac574ab service nova] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Received event network-changed-2ca66e03-30d5-4fcf-b92c-0f5ea8e7ac07 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1028.229406] env[61629]: DEBUG nova.compute.manager [req-5d9f9cd3-0d59-4c8b-acdf-2fe4dc661c88 req-c20a3a6b-f87e-49fe-9547-25615ac574ab service nova] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Refreshing instance network info cache due to event network-changed-2ca66e03-30d5-4fcf-b92c-0f5ea8e7ac07. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1028.229640] env[61629]: DEBUG oslo_concurrency.lockutils [req-5d9f9cd3-0d59-4c8b-acdf-2fe4dc661c88 req-c20a3a6b-f87e-49fe-9547-25615ac574ab service nova] Acquiring lock "refresh_cache-3b116c59-a904-4b68-9c74-58954b3de240" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1028.229846] env[61629]: DEBUG oslo_concurrency.lockutils [req-5d9f9cd3-0d59-4c8b-acdf-2fe4dc661c88 req-c20a3a6b-f87e-49fe-9547-25615ac574ab service nova] Acquired lock "refresh_cache-3b116c59-a904-4b68-9c74-58954b3de240" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.229968] env[61629]: DEBUG nova.network.neutron [req-5d9f9cd3-0d59-4c8b-acdf-2fe4dc661c88 req-c20a3a6b-f87e-49fe-9547-25615ac574ab service nova] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Refreshing network info cache for port 2ca66e03-30d5-4fcf-b92c-0f5ea8e7ac07 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1028.549878] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354575, 'name': CreateVM_Task, 'duration_secs': 0.323386} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.550274] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1028.550740] env[61629]: DEBUG oslo_concurrency.lockutils [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1028.550914] env[61629]: DEBUG oslo_concurrency.lockutils [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.551256] env[61629]: DEBUG oslo_concurrency.lockutils [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1028.551516] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c31eee0e-78e9-4c01-b5c6-654ba5343b7a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.555523] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1028.555523] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]521d6d7b-62ef-974d-7235-9e1da4ecc8bc" [ 1028.555523] env[61629]: _type = "Task" [ 1028.555523] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.562558] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]521d6d7b-62ef-974d-7235-9e1da4ecc8bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.573364] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354576, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072105} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.573604] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1028.574318] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56bbfad7-a852-415a-ab45-bdd5f81ee99f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.599260] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] c99de956-c382-4203-b2a7-d3f8709d188a/c99de956-c382-4203-b2a7-d3f8709d188a.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1028.603744] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43ec90ab-5a86-4b9a-965f-4a9d5f6933e1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.617483] env[61629]: DEBUG oslo_vmware.api [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Task: {'id': task-1354577, 'name': PowerOffVM_Task, 'duration_secs': 0.176195} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.618047] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1028.618213] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1028.618890] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d6bb2b-c166-4873-b56e-8aea18b4dfeb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.621494] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e5dd6a8f-0e4a-4f97-9b61-e86578c61a8b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.626529] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-d28c7a6b-ffa1-4fcc-b3d1-98e22ff74848 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Suspending the VM {{(pid=61629) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1028.627577] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-bc672b95-9db4-4002-907f-c4fc0d6756bc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.628889] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1028.628889] env[61629]: value = "task-1354578" [ 1028.628889] env[61629]: _type = "Task" [ 1028.628889] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.634945] env[61629]: DEBUG oslo_vmware.api [None req-d28c7a6b-ffa1-4fcc-b3d1-98e22ff74848 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 1028.634945] env[61629]: value = "task-1354580" [ 1028.634945] env[61629]: _type = "Task" [ 1028.634945] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.637880] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354578, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.645148] env[61629]: DEBUG oslo_vmware.api [None req-d28c7a6b-ffa1-4fcc-b3d1-98e22ff74848 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354580, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.704294] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1028.704594] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1028.704818] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Deleting the datastore file [datastore1] 0daebf05-e42b-49c5-aa24-43304a1c3cc0 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1028.705151] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4ca67b76-ee85-43b9-9afd-d259b391c61c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.711507] env[61629]: DEBUG oslo_vmware.api [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Waiting for the task: (returnval){ [ 1028.711507] env[61629]: value = "task-1354581" [ 1028.711507] env[61629]: _type = "Task" [ 1028.711507] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.719854] env[61629]: DEBUG oslo_vmware.api [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Task: {'id': task-1354581, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.723480] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1670447e-a245-49b6-872e-83c921f4f950 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "459c5f25-8fb1-4e43-8f7f-359a7ff697f2" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.723761] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1670447e-a245-49b6-872e-83c921f4f950 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "459c5f25-8fb1-4e43-8f7f-359a7ff697f2" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.937978] env[61629]: DEBUG nova.network.neutron [req-5d9f9cd3-0d59-4c8b-acdf-2fe4dc661c88 req-c20a3a6b-f87e-49fe-9547-25615ac574ab service nova] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Updated VIF entry in instance network info cache for port 2ca66e03-30d5-4fcf-b92c-0f5ea8e7ac07. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1028.938397] env[61629]: DEBUG nova.network.neutron [req-5d9f9cd3-0d59-4c8b-acdf-2fe4dc661c88 req-c20a3a6b-f87e-49fe-9547-25615ac574ab service nova] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Updating instance_info_cache with network_info: [{"id": "2ca66e03-30d5-4fcf-b92c-0f5ea8e7ac07", "address": "fa:16:3e:5c:ec:35", "network": {"id": "03610486-2741-491e-a62d-a51579315e5a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1394073503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38efdd2cc07f45a49fb06d590aafb96b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ca66e03-30", "ovs_interfaceid": "2ca66e03-30d5-4fcf-b92c-0f5ea8e7ac07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.066318] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]521d6d7b-62ef-974d-7235-9e1da4ecc8bc, 'name': SearchDatastore_Task, 'duration_secs': 0.069025} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.066664] env[61629]: DEBUG oslo_concurrency.lockutils [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.066921] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1029.067199] env[61629]: DEBUG oslo_concurrency.lockutils [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1029.067405] env[61629]: DEBUG oslo_concurrency.lockutils [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.067626] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1029.067920] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9feabcb0-08a0-433a-bdcc-4af8b4eb95e2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.085245] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1029.085404] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1029.086166] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5573ea8a-f346-44c7-b659-afacc66c5ce4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.091964] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1029.091964] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]523126a3-9ccc-f296-387c-29cff6c45a8b" [ 1029.091964] env[61629]: _type = "Task" [ 1029.091964] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.099776] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]523126a3-9ccc-f296-387c-29cff6c45a8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.140155] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354578, 'name': ReconfigVM_Task, 'duration_secs': 0.459168} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.143336] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Reconfigured VM instance instance-00000065 to attach disk [datastore1] c99de956-c382-4203-b2a7-d3f8709d188a/c99de956-c382-4203-b2a7-d3f8709d188a.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1029.144027] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a62c7bcb-e85d-4678-8eee-8c567d893161 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.150361] env[61629]: DEBUG oslo_vmware.api [None req-d28c7a6b-ffa1-4fcc-b3d1-98e22ff74848 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354580, 'name': SuspendVM_Task} progress is 70%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.152055] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1029.152055] env[61629]: value = "task-1354582" [ 1029.152055] env[61629]: _type = "Task" [ 1029.152055] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.162264] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354582, 'name': Rename_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.221545] env[61629]: DEBUG oslo_vmware.api [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Task: {'id': task-1354581, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.426942} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.221923] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1029.222176] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1029.222425] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1029.222634] env[61629]: INFO nova.compute.manager [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1029.222899] env[61629]: DEBUG oslo.service.loopingcall [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1029.223111] env[61629]: DEBUG nova.compute.manager [-] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1029.223204] env[61629]: DEBUG nova.network.neutron [-] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1029.226673] env[61629]: INFO nova.compute.manager [None req-1670447e-a245-49b6-872e-83c921f4f950 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Detaching volume 8806a247-abb8-469b-9cc7-f3e68e3d20de [ 1029.261858] env[61629]: INFO nova.virt.block_device [None req-1670447e-a245-49b6-872e-83c921f4f950 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Attempting to driver detach volume 8806a247-abb8-469b-9cc7-f3e68e3d20de from mountpoint /dev/sdb [ 1029.261958] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-1670447e-a245-49b6-872e-83c921f4f950 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Volume detach. Driver type: vmdk {{(pid=61629) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1029.262167] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-1670447e-a245-49b6-872e-83c921f4f950 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-288563', 'volume_id': '8806a247-abb8-469b-9cc7-f3e68e3d20de', 'name': 'volume-8806a247-abb8-469b-9cc7-f3e68e3d20de', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '459c5f25-8fb1-4e43-8f7f-359a7ff697f2', 'attached_at': '', 'detached_at': '', 'volume_id': '8806a247-abb8-469b-9cc7-f3e68e3d20de', 'serial': '8806a247-abb8-469b-9cc7-f3e68e3d20de'} {{(pid=61629) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1029.263888] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825960a7-57b3-43fa-bb55-0dc61e1e383b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.291783] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8095b93-8f7d-40d5-8574-8b51fce25b12 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.299431] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5262d388-1eee-4dea-9856-8805739f600d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.320395] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f7b6bb-682e-40bd-bd2c-d2ed594d6807 {{(pid=61629) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.335365] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-1670447e-a245-49b6-872e-83c921f4f950 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] The volume has not been displaced from its original location: [datastore1] volume-8806a247-abb8-469b-9cc7-f3e68e3d20de/volume-8806a247-abb8-469b-9cc7-f3e68e3d20de.vmdk. No consolidation needed. {{(pid=61629) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1029.340881] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-1670447e-a245-49b6-872e-83c921f4f950 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Reconfiguring VM instance instance-00000058 to detach disk 2001 {{(pid=61629) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1029.341220] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2ca57c6-328d-47aa-a095-cd459b9b5ced {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.359125] env[61629]: DEBUG oslo_vmware.api [None req-1670447e-a245-49b6-872e-83c921f4f950 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 1029.359125] env[61629]: value = "task-1354583" [ 1029.359125] env[61629]: _type = "Task" [ 1029.359125] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.367408] env[61629]: DEBUG oslo_vmware.api [None req-1670447e-a245-49b6-872e-83c921f4f950 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354583, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.441445] env[61629]: DEBUG oslo_concurrency.lockutils [req-5d9f9cd3-0d59-4c8b-acdf-2fe4dc661c88 req-c20a3a6b-f87e-49fe-9547-25615ac574ab service nova] Releasing lock "refresh_cache-3b116c59-a904-4b68-9c74-58954b3de240" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.603028] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]523126a3-9ccc-f296-387c-29cff6c45a8b, 'name': SearchDatastore_Task, 'duration_secs': 0.02372} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.603357] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cb9632e-7432-4e5d-a86b-b43b02ed14fa {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.608731] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1029.608731] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]526c5451-e969-51d7-d1ca-1d3c1dc3b266" [ 1029.608731] env[61629]: _type = "Task" [ 1029.608731] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.616465] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]526c5451-e969-51d7-d1ca-1d3c1dc3b266, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.649150] env[61629]: DEBUG oslo_vmware.api [None req-d28c7a6b-ffa1-4fcc-b3d1-98e22ff74848 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354580, 'name': SuspendVM_Task, 'duration_secs': 0.651474} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.649483] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-d28c7a6b-ffa1-4fcc-b3d1-98e22ff74848 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Suspended the VM {{(pid=61629) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1029.649685] env[61629]: DEBUG nova.compute.manager [None req-d28c7a6b-ffa1-4fcc-b3d1-98e22ff74848 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1029.650479] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6488814-328e-4bc8-b2e1-90191895112f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.663825] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354582, 'name': Rename_Task, 'duration_secs': 0.168414} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.664298] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1029.664531] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2d835970-bcc5-49c9-b820-623abff7bf8d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.669918] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1029.669918] env[61629]: value = "task-1354584" [ 1029.669918] env[61629]: _type = "Task" [ 1029.669918] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.678224] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354584, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.868524] env[61629]: DEBUG oslo_vmware.api [None req-1670447e-a245-49b6-872e-83c921f4f950 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354583, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.118600] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]526c5451-e969-51d7-d1ca-1d3c1dc3b266, 'name': SearchDatastore_Task, 'duration_secs': 0.009987} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.120045] env[61629]: DEBUG oslo_concurrency.lockutils [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1030.120045] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 3b116c59-a904-4b68-9c74-58954b3de240/3b116c59-a904-4b68-9c74-58954b3de240.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1030.120045] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ed3549a-76a6-47cc-bd69-ddb43d531f0c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.125914] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1030.125914] env[61629]: value = "task-1354585" [ 1030.125914] env[61629]: _type = "Task" [ 1030.125914] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.133336] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354585, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.178995] env[61629]: DEBUG nova.network.neutron [-] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.185291] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354584, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.262641] env[61629]: DEBUG nova.compute.manager [req-2f28327f-e0ff-4464-8d9e-819c50094111 req-bce105f6-850d-44e8-b460-82bb3069f005 service nova] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Received event network-vif-deleted-abf6c35f-b11c-4f9e-b605-8f889ccf39ab {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1030.369319] env[61629]: DEBUG oslo_vmware.api [None req-1670447e-a245-49b6-872e-83c921f4f950 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354583, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.638062] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354585, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.683372] env[61629]: INFO nova.compute.manager [-] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Took 1.46 seconds to deallocate network for instance. [ 1030.683802] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354584, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.871513] env[61629]: DEBUG oslo_vmware.api [None req-1670447e-a245-49b6-872e-83c921f4f950 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354583, 'name': ReconfigVM_Task, 'duration_secs': 1.216605} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.871897] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-1670447e-a245-49b6-872e-83c921f4f950 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Reconfigured VM instance instance-00000058 to detach disk 2001 {{(pid=61629) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1030.877405] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c7ff8f7-1c1f-4d1f-b937-cfc7e04257e2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.893206] env[61629]: DEBUG oslo_vmware.api [None req-1670447e-a245-49b6-872e-83c921f4f950 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 1030.893206] env[61629]: value = "task-1354586" [ 1030.893206] env[61629]: _type = "Task" [ 1030.893206] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.901606] env[61629]: DEBUG oslo_vmware.api [None req-1670447e-a245-49b6-872e-83c921f4f950 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354586, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.084120] env[61629]: INFO nova.compute.manager [None req-ba85a1c3-5a0b-47ec-a8e0-d5530422b310 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Resuming [ 1031.084889] env[61629]: DEBUG nova.objects.instance [None req-ba85a1c3-5a0b-47ec-a8e0-d5530422b310 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lazy-loading 'flavor' on Instance uuid c5b6f6b8-587c-4b74-bc83-98dac319b15b {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1031.137121] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354585, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.76494} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.137415] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 3b116c59-a904-4b68-9c74-58954b3de240/3b116c59-a904-4b68-9c74-58954b3de240.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1031.137631] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1031.137904] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-11e1d6eb-8383-4d9d-8aed-c1173e10a90a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.144320] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1031.144320] env[61629]: value = "task-1354587" [ 1031.144320] env[61629]: _type = "Task" [ 1031.144320] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.155316] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354587, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.179912] env[61629]: DEBUG oslo_vmware.api [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354584, 'name': PowerOnVM_Task, 'duration_secs': 1.308558} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.180184] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1031.180426] env[61629]: INFO nova.compute.manager [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Took 7.55 seconds to spawn the instance on the hypervisor. [ 1031.180658] env[61629]: DEBUG nova.compute.manager [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1031.181410] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aaa9893-1a97-4376-8af6-ade7cf60c8e3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.190926] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.190926] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.190926] env[61629]: DEBUG nova.objects.instance [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Lazy-loading 'resources' on Instance uuid 0daebf05-e42b-49c5-aa24-43304a1c3cc0 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1031.403500] env[61629]: DEBUG oslo_vmware.api [None req-1670447e-a245-49b6-872e-83c921f4f950 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354586, 'name': ReconfigVM_Task, 'duration_secs': 0.173251} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.403842] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-1670447e-a245-49b6-872e-83c921f4f950 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-288563', 'volume_id': '8806a247-abb8-469b-9cc7-f3e68e3d20de', 'name': 'volume-8806a247-abb8-469b-9cc7-f3e68e3d20de', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '459c5f25-8fb1-4e43-8f7f-359a7ff697f2', 'attached_at': '', 'detached_at': '', 'volume_id': '8806a247-abb8-469b-9cc7-f3e68e3d20de', 'serial': '8806a247-abb8-469b-9cc7-f3e68e3d20de'} {{(pid=61629) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1031.654221] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354587, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.187308} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.654579] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1031.655364] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff29d9b4-0ae7-4072-9b16-fa46f2d34c9e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.676878] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 3b116c59-a904-4b68-9c74-58954b3de240/3b116c59-a904-4b68-9c74-58954b3de240.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1031.677152] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c462008e-b179-428f-a747-cadf9d4c538f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.700368] env[61629]: INFO nova.compute.manager [None req-9f34350a-53af-41c5-9553-ce491868847c tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Took 13.05 seconds to build instance. [ 1031.704965] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1031.704965] env[61629]: value = "task-1354588" [ 1031.704965] env[61629]: _type = "Task" [ 1031.704965] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.714978] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354588, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.814906] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8717bd73-c6de-47ff-b97d-be4f3cf3ca8c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.822512] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83bddcc5-318c-44fa-b5a1-83ec132ae6ea {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.855749] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36cd8ab0-9207-4b1c-8325-04a9796a481c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.863018] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b414d90c-b681-47f4-9329-440eb5f1ae0c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.876613] env[61629]: DEBUG nova.compute.provider_tree [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.954762] env[61629]: DEBUG nova.objects.instance [None req-1670447e-a245-49b6-872e-83c921f4f950 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lazy-loading 'flavor' on Instance uuid 459c5f25-8fb1-4e43-8f7f-359a7ff697f2 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1032.094518] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ba85a1c3-5a0b-47ec-a8e0-d5530422b310 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "refresh_cache-c5b6f6b8-587c-4b74-bc83-98dac319b15b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1032.094746] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ba85a1c3-5a0b-47ec-a8e0-d5530422b310 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquired lock "refresh_cache-c5b6f6b8-587c-4b74-bc83-98dac319b15b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.094904] env[61629]: DEBUG nova.network.neutron [None req-ba85a1c3-5a0b-47ec-a8e0-d5530422b310 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1032.204170] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9f34350a-53af-41c5-9553-ce491868847c 
tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "c99de956-c382-4203-b2a7-d3f8709d188a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.566s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.205741] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "c99de956-c382-4203-b2a7-d3f8709d188a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.205995] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "c99de956-c382-4203-b2a7-d3f8709d188a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.206268] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "c99de956-c382-4203-b2a7-d3f8709d188a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.206470] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "c99de956-c382-4203-b2a7-d3f8709d188a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.206644] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "c99de956-c382-4203-b2a7-d3f8709d188a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.211907] env[61629]: INFO nova.compute.manager [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Terminating instance [ 1032.214043] env[61629]: DEBUG nova.compute.manager [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1032.214272] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1032.215031] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190d1b7d-e4c2-46b7-bf71-962f8325c906 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.221445] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354588, 'name': ReconfigVM_Task, 'duration_secs': 0.288357} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.222014] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 3b116c59-a904-4b68-9c74-58954b3de240/3b116c59-a904-4b68-9c74-58954b3de240.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1032.222584] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1bb715fb-8931-4f4c-906f-757de40fe641 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.225791] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1032.226324] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd78164b-70a6-4168-b109-4af39d4f65ea {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.230379] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1032.230379] env[61629]: value = "task-1354589" [ 1032.230379] env[61629]: _type = "Task" [ 1032.230379] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.234387] env[61629]: DEBUG oslo_vmware.api [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1032.234387] env[61629]: value = "task-1354590" [ 1032.234387] env[61629]: _type = "Task" [ 1032.234387] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.244256] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354589, 'name': Rename_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.247162] env[61629]: DEBUG oslo_vmware.api [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354590, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.382052] env[61629]: DEBUG nova.scheduler.client.report [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1032.743335] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354589, 'name': Rename_Task, 'duration_secs': 0.135415} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.746468] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1032.746737] env[61629]: DEBUG oslo_vmware.api [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354590, 'name': PowerOffVM_Task, 'duration_secs': 0.311525} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.746940] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aa9e735b-8ef3-459d-9067-46446e58a6c9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.748404] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1032.748584] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1032.748814] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2641cbeb-42da-4f9d-90c7-757e78d588ce {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.755096] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1032.755096] env[61629]: value = "task-1354591" [ 1032.755096] env[61629]: _type = "Task" [ 1032.755096] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.768947] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354591, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.796947] env[61629]: DEBUG nova.network.neutron [None req-ba85a1c3-5a0b-47ec-a8e0-d5530422b310 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Updating instance_info_cache with network_info: [{"id": "57805f12-9b81-4485-8f3a-32567ed40a8c", "address": "fa:16:3e:ca:29:87", "network": {"id": "249c4ba3-38e0-421a-91b6-cf97f90eb535", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1700423127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bd318d29ec50427eb997c83837120c9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57805f12-9b", "ovs_interfaceid": "57805f12-9b81-4485-8f3a-32567ed40a8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.867563] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1032.868152] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1032.868152] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Deleting the datastore file [datastore1] c99de956-c382-4203-b2a7-d3f8709d188a {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1032.868354] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d684d71-9df1-4ea5-8d4e-6bc23714557c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.874757] env[61629]: DEBUG oslo_vmware.api [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1032.874757] env[61629]: value = "task-1354593" [ 1032.874757] env[61629]: _type = "Task" [ 1032.874757] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.882968] env[61629]: DEBUG oslo_vmware.api [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354593, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.886764] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.696s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.906691] env[61629]: INFO nova.scheduler.client.report [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Deleted allocations for instance 0daebf05-e42b-49c5-aa24-43304a1c3cc0 [ 1032.963037] env[61629]: DEBUG oslo_concurrency.lockutils [None req-1670447e-a245-49b6-872e-83c921f4f950 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "459c5f25-8fb1-4e43-8f7f-359a7ff697f2" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.239s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.266091] env[61629]: DEBUG oslo_vmware.api [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354591, 'name': PowerOnVM_Task, 'duration_secs': 0.461514} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.266251] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1033.266349] env[61629]: INFO nova.compute.manager [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Took 7.42 seconds to spawn the instance on the hypervisor. 
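The instance_info_cache entry logged above for instance c5b6f6b8-587c-4b74-bc83-98dac319b15b is a plain list of VIF dictionaries. A minimal Python sketch of pulling the commonly needed fields out of such an entry follows; the dict literal is trimmed from the log line above, and the helper name is purely illustrative, not part of Nova.

# Sketch: extracting fields from a Nova network_info entry like the one
# logged above. extract_vif_summary is a hypothetical helper for illustration.

network_info = [{
    "id": "57805f12-9b81-4485-8f3a-32567ed40a8c",
    "address": "fa:16:3e:ca:29:87",
    "network": {
        "id": "249c4ba3-38e0-421a-91b6-cf97f90eb535",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4}],
        }],
    },
    "type": "ovs",
    "devname": "tap57805f12-9b",
    "ovs_interfaceid": "57805f12-9b81-4485-8f3a-32567ed40a8c",
    "active": True,
}]


def extract_vif_summary(vifs):
    """Return (port_id, mac, fixed_ips, devname) for each VIF in the cache."""
    summaries = []
    for vif in vifs:
        fixed_ips = [
            ip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
            if ip.get("type") == "fixed"
        ]
        summaries.append((vif["id"], vif["address"], fixed_ips, vif["devname"]))
    return summaries


print(extract_vif_summary(network_info))
# [('57805f12-9b81-4485-8f3a-32567ed40a8c', 'fa:16:3e:ca:29:87',
#   ['192.168.128.4'], 'tap57805f12-9b')]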
[ 1033.266538] env[61629]: DEBUG nova.compute.manager [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1033.267317] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-712458df-ca20-48a4-980b-7ea456b4c2ed {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.301428] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ba85a1c3-5a0b-47ec-a8e0-d5530422b310 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Releasing lock "refresh_cache-c5b6f6b8-587c-4b74-bc83-98dac319b15b" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.303100] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-902fafa4-cf03-45a2-8b9f-3cce75d05781 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.310347] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ba85a1c3-5a0b-47ec-a8e0-d5530422b310 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Resuming the VM {{(pid=61629) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1033.310617] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37830f71-03f8-420b-a169-d200c6144d82 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.318393] env[61629]: DEBUG oslo_vmware.api [None req-ba85a1c3-5a0b-47ec-a8e0-d5530422b310 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 1033.318393] env[61629]: value = "task-1354594" [ 1033.318393] env[61629]: _type = "Task" [ 1033.318393] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.331883] env[61629]: DEBUG oslo_vmware.api [None req-ba85a1c3-5a0b-47ec-a8e0-d5530422b310 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354594, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.385335] env[61629]: DEBUG oslo_vmware.api [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354593, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149259} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.385620] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1033.385823] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1033.386025] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1033.386174] env[61629]: INFO nova.compute.manager [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1033.386453] env[61629]: DEBUG oslo.service.loopingcall [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1033.386658] env[61629]: DEBUG nova.compute.manager [-] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1033.386944] env[61629]: DEBUG nova.network.neutron [-] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1033.416342] env[61629]: DEBUG oslo_concurrency.lockutils [None req-5c6cb900-f375-4a25-bb86-6ed053fff0c3 tempest-ServersTestManualDisk-1423538324 tempest-ServersTestManualDisk-1423538324-project-member] Lock "0daebf05-e42b-49c5-aa24-43304a1c3cc0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.380s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.722849] env[61629]: DEBUG nova.compute.manager [req-e245f4ef-7ccd-4126-bdf6-4cffd5582466 req-c8e08dec-48e5-4f32-baca-1c691893b9b2 service nova] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Received event network-vif-deleted-740d1d45-2daa-44df-ac77-406865a8b131 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1033.723083] env[61629]: INFO nova.compute.manager [req-e245f4ef-7ccd-4126-bdf6-4cffd5582466 req-c8e08dec-48e5-4f32-baca-1c691893b9b2 service nova] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Neutron deleted interface 740d1d45-2daa-44df-ac77-406865a8b131; detaching it from the instance and deleting it from the info cache [ 1033.723269] env[61629]: DEBUG nova.network.neutron [req-e245f4ef-7ccd-4126-bdf6-4cffd5582466 
req-c8e08dec-48e5-4f32-baca-1c691893b9b2 service nova] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.789656] env[61629]: INFO nova.compute.manager [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Took 12.92 seconds to build instance. [ 1033.835086] env[61629]: DEBUG oslo_vmware.api [None req-ba85a1c3-5a0b-47ec-a8e0-d5530422b310 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354594, 'name': PowerOnVM_Task} progress is 93%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.105153] env[61629]: DEBUG oslo_concurrency.lockutils [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "459c5f25-8fb1-4e43-8f7f-359a7ff697f2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.105450] env[61629]: DEBUG oslo_concurrency.lockutils [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "459c5f25-8fb1-4e43-8f7f-359a7ff697f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.105671] env[61629]: DEBUG oslo_concurrency.lockutils [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "459c5f25-8fb1-4e43-8f7f-359a7ff697f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.105863] env[61629]: DEBUG oslo_concurrency.lockutils [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "459c5f25-8fb1-4e43-8f7f-359a7ff697f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.106057] env[61629]: DEBUG oslo_concurrency.lockutils [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "459c5f25-8fb1-4e43-8f7f-359a7ff697f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.108357] env[61629]: INFO nova.compute.manager [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Terminating instance 
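The "Acquiring lock ... by ...", "acquired ... :: waited", and '"released" ... :: held' triplets above are emitted by oslo.concurrency's lockutils helpers. A rough sketch of the pattern that produces this kind of trace is below, using lockutils.synchronized and lockutils.lock; the lock names and function bodies are illustrative only and are not Nova's real code paths.

# Sketch of the oslo.concurrency locking pattern behind the
# "Acquiring lock ... / acquired ... / released ..." lines above.

from oslo_concurrency import lockutils


@lockutils.synchronized('459c5f25-8fb1-4e43-8f7f-359a7ff697f2-events')
def clear_events():
    # Runs only while the named in-process lock is held; lockutils logs the
    # acquire/release (with waited/held times) at DEBUG, as seen in the log.
    return []


def do_terminate_instance(instance_uuid):
    # The context-manager form produces the same acquire/release logging.
    with lockutils.lock(instance_uuid):
        clear_events()
        # ... tear down the instance while holding the per-instance lock ...


do_terminate_instance('459c5f25-8fb1-4e43-8f7f-359a7ff697f2')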
[ 1034.110372] env[61629]: DEBUG nova.compute.manager [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1034.110619] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1034.111483] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f800b272-2fe8-4025-a1e1-a2a6f6bd405e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.121520] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1034.121871] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-03b3fc63-91eb-4a1f-a7e7-e758e6077cd5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.131310] env[61629]: DEBUG oslo_vmware.api [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 1034.131310] env[61629]: value = "task-1354595" [ 1034.131310] env[61629]: _type = "Task" [ 1034.131310] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.139835] env[61629]: DEBUG oslo_vmware.api [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354595, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.154561] env[61629]: DEBUG nova.network.neutron [-] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.230967] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9d70b831-8851-428e-baab-88bfbd63052a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.243716] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2a7a61-fcd2-4582-8758-3969f31bfda6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.281921] env[61629]: DEBUG nova.compute.manager [req-e245f4ef-7ccd-4126-bdf6-4cffd5582466 req-c8e08dec-48e5-4f32-baca-1c691893b9b2 service nova] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Detach interface failed, port_id=740d1d45-2daa-44df-ac77-406865a8b131, reason: Instance c99de956-c382-4203-b2a7-d3f8709d188a could not be found. {{(pid=61629) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1034.292275] env[61629]: DEBUG oslo_concurrency.lockutils [None req-005fff4b-2fa5-40f3-917a-cdaa7f4bd759 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "3b116c59-a904-4b68-9c74-58954b3de240" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.428s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.313359] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e62b5b0-6d87-47d2-ada1-930c403fdaf8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.320424] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-bdcbbcd3-b2dc-47ab-88df-b36452fafa80 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Suspending the VM {{(pid=61629) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1034.323790] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-496d5e6a-e9ab-442e-be8d-54606bea64c2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.330944] env[61629]: DEBUG oslo_vmware.api [None req-ba85a1c3-5a0b-47ec-a8e0-d5530422b310 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354594, 'name': PowerOnVM_Task, 'duration_secs': 0.626973} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.334028] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ba85a1c3-5a0b-47ec-a8e0-d5530422b310 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Resumed the VM {{(pid=61629) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1034.334028] env[61629]: DEBUG nova.compute.manager [None req-ba85a1c3-5a0b-47ec-a8e0-d5530422b310 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1034.334028] env[61629]: DEBUG oslo_vmware.api [None req-bdcbbcd3-b2dc-47ab-88df-b36452fafa80 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1034.334028] env[61629]: value = "task-1354596" [ 1034.334028] env[61629]: _type = "Task" [ 1034.334028] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.334739] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57daf039-03b9-4fd4-b3dd-684cb01aab19 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.349829] env[61629]: DEBUG oslo_vmware.api [None req-bdcbbcd3-b2dc-47ab-88df-b36452fafa80 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354596, 'name': SuspendVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.641739] env[61629]: DEBUG oslo_vmware.api [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354595, 'name': PowerOffVM_Task, 'duration_secs': 0.209784} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.642085] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1034.642267] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1034.642537] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c797b50b-ede5-4c94-ac35-eed68f6913d0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.659922] env[61629]: INFO nova.compute.manager [-] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Took 1.27 seconds to deallocate network for instance. 
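The destroy trace above (PowerOffVM_Task, then UnregisterVM, then FileManager.DeleteDatastoreFile_Task, each vCenter task followed by wait_for_task polling until "completed successfully") can be approximated directly against oslo.vmware. The sketch below assumes an established VMwareAPISession and already-resolved vm_ref/dc_ref managed-object references; it is a simplified illustration, not Nova's vmops implementation.

# Sketch of the destroy sequence traced above, written against oslo.vmware.
# `session` is an oslo_vmware.api.VMwareAPISession; `vm_ref` and `dc_ref`
# (managed object refs for the VM and its datacenter) are assumed given.

def destroy_vm(session, vm_ref, dc_ref, ds_path):
    # 1. Power off the VM and block until the vCenter task finishes;
    #    wait_for_task polls and logs "progress is N%" as seen above.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # 2. Unregister the VM from the vCenter inventory (not a task).
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # 3. Delete the VM's directory from the datastore, e.g.
    #    "[datastore1] c99de956-c382-4203-b2a7-d3f8709d188a".
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)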
[ 1034.709863] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1034.710106] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1034.710298] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Deleting the datastore file [datastore2] 459c5f25-8fb1-4e43-8f7f-359a7ff697f2 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1034.711315] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e9b24b3-3561-4349-b0bb-1cdaedbfc23e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.718602] env[61629]: DEBUG oslo_vmware.api [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 1034.718602] env[61629]: value = "task-1354598" [ 1034.718602] env[61629]: _type = "Task" [ 1034.718602] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.726473] env[61629]: DEBUG oslo_vmware.api [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354598, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.849441] env[61629]: DEBUG oslo_vmware.api [None req-bdcbbcd3-b2dc-47ab-88df-b36452fafa80 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354596, 'name': SuspendVM_Task} progress is 70%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.172067] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.172067] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.172067] env[61629]: DEBUG nova.objects.instance [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lazy-loading 'resources' on Instance uuid c99de956-c382-4203-b2a7-d3f8709d188a {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1035.228550] env[61629]: DEBUG oslo_vmware.api [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354598, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.248679} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.228819] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1035.229014] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1035.229219] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1035.229434] env[61629]: INFO nova.compute.manager [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1035.229721] env[61629]: DEBUG oslo.service.loopingcall [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1035.229882] env[61629]: DEBUG nova.compute.manager [-] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1035.229972] env[61629]: DEBUG nova.network.neutron [-] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1035.347529] env[61629]: DEBUG oslo_vmware.api [None req-bdcbbcd3-b2dc-47ab-88df-b36452fafa80 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354596, 'name': SuspendVM_Task, 'duration_secs': 0.726594} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.347806] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-bdcbbcd3-b2dc-47ab-88df-b36452fafa80 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Suspended the VM {{(pid=61629) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1035.347989] env[61629]: DEBUG nova.compute.manager [None req-bdcbbcd3-b2dc-47ab-88df-b36452fafa80 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1035.348767] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a5e436-4bf7-4e84-9b74-977fcc173380 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.792336] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1543c6ad-444f-437a-b466-abc6bea26ca0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.801670] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e571534e-70b3-4f1a-8122-e1e9e106a611 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.809055] env[61629]: DEBUG nova.compute.manager [req-fd0d1737-33b2-420c-8a4c-710b4b1c6950 req-976e616c-18e8-4262-afc8-e89d76229ac0 service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Received event network-vif-deleted-c827ba81-d74a-4ff3-bfc2-81b5e09c683c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1035.809300] env[61629]: INFO nova.compute.manager [req-fd0d1737-33b2-420c-8a4c-710b4b1c6950 req-976e616c-18e8-4262-afc8-e89d76229ac0 service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Neutron deleted interface c827ba81-d74a-4ff3-bfc2-81b5e09c683c; detaching it from the instance and deleting it from the info cache [ 1035.809495] env[61629]: DEBUG nova.network.neutron [req-fd0d1737-33b2-420c-8a4c-710b4b1c6950 req-976e616c-18e8-4262-afc8-e89d76229ac0 service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.835782] env[61629]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c7baf0-04dd-4632-814a-3b700e532a7f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.838393] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7c092d85-acfc-46d3-b72b-9e5f32bc77f4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.847659] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342b4121-5efd-4f15-b3bd-569cdb4e11ff {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.854241] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff93fd07-6954-4d7e-9de5-537d34d56f1d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.876395] env[61629]: DEBUG nova.compute.provider_tree [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1035.890724] env[61629]: DEBUG nova.compute.manager [req-fd0d1737-33b2-420c-8a4c-710b4b1c6950 req-976e616c-18e8-4262-afc8-e89d76229ac0 service nova] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Detach interface failed, port_id=c827ba81-d74a-4ff3-bfc2-81b5e09c683c, reason: Instance 459c5f25-8fb1-4e43-8f7f-359a7ff697f2 could not be found. {{(pid=61629) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1036.285634] env[61629]: DEBUG nova.network.neutron [-] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.379877] env[61629]: DEBUG nova.scheduler.client.report [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1036.787989] env[61629]: INFO nova.compute.manager [-] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Took 1.56 seconds to deallocate network for instance. 
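The inventory data reported above for provider d075eff1-6f77-44a8-824e-16f3e03b4063 fixes how much of each resource class Placement will hand out: as commonly documented for Placement, consumable capacity is (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size constraining any single allocation. A small worked example using the exact figures from the log; the formula is stated as an assumption about Placement's model, not something taken from this log.

# Worked example: effective capacity implied by the inventory reported above.

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0,
                  'min_unit': 1, 'max_unit': 16,    'step_size': 1},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                  'min_unit': 1, 'max_unit': 65530, 'step_size': 1},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0,
                  'min_unit': 1, 'max_unit': 151,   'step_size': 1},
}

for rc, inv in inventory.items():
    # Consumable capacity per resource class; max_unit caps any one request.
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:g}, largest single allocation={inv['max_unit']}")

# VCPU: capacity=192, largest single allocation=16
# MEMORY_MB: capacity=196078, largest single allocation=65530
# DISK_GB: capacity=400, largest single allocation=151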
[ 1036.798014] env[61629]: DEBUG oslo_concurrency.lockutils [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "3b116c59-a904-4b68-9c74-58954b3de240" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.798014] env[61629]: DEBUG oslo_concurrency.lockutils [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "3b116c59-a904-4b68-9c74-58954b3de240" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.798014] env[61629]: DEBUG oslo_concurrency.lockutils [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "3b116c59-a904-4b68-9c74-58954b3de240-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.798014] env[61629]: DEBUG oslo_concurrency.lockutils [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "3b116c59-a904-4b68-9c74-58954b3de240-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.798014] env[61629]: DEBUG oslo_concurrency.lockutils [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "3b116c59-a904-4b68-9c74-58954b3de240-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.798014] env[61629]: INFO nova.compute.manager [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Terminating instance [ 1036.799503] env[61629]: DEBUG nova.compute.manager [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1036.799677] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1036.800533] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b4a1510-e520-46a5-b8cc-7367a293e7ea {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.808156] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1036.808472] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13d0a911-84e0-46b4-ba21-2a6d56a5e2a1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.870487] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1036.870487] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1036.870487] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Deleting the datastore file [datastore1] 3b116c59-a904-4b68-9c74-58954b3de240 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1036.870487] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9500e048-1d37-4f8b-9ad9-f3cc9fb99588 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.876482] env[61629]: DEBUG oslo_vmware.api [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1036.876482] env[61629]: value = "task-1354600" [ 1036.876482] env[61629]: _type = "Task" [ 1036.876482] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.884905] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.714s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.886858] env[61629]: DEBUG oslo_vmware.api [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354600, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.909089] env[61629]: INFO nova.scheduler.client.report [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Deleted allocations for instance c99de956-c382-4203-b2a7-d3f8709d188a [ 1037.298958] env[61629]: DEBUG oslo_concurrency.lockutils [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.299276] env[61629]: DEBUG oslo_concurrency.lockutils [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.299547] env[61629]: DEBUG nova.objects.instance [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lazy-loading 'resources' on Instance uuid 459c5f25-8fb1-4e43-8f7f-359a7ff697f2 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1037.385931] env[61629]: DEBUG oslo_vmware.api [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354600, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173859} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.386184] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1037.386377] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1037.386556] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1037.386731] env[61629]: INFO nova.compute.manager [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Took 0.59 seconds to destroy the instance on the hypervisor. [ 1037.386970] env[61629]: DEBUG oslo.service.loopingcall [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1037.387173] env[61629]: DEBUG nova.compute.manager [-] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1037.387269] env[61629]: DEBUG nova.network.neutron [-] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1037.416275] env[61629]: DEBUG oslo_concurrency.lockutils [None req-d83277b2-40c8-4466-9f1a-81155625730a tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "c99de956-c382-4203-b2a7-d3f8709d188a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.210s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.837242] env[61629]: DEBUG nova.compute.manager [req-1264f01b-8fba-4dcd-b8c1-a51e5788581a req-ac08a5b0-c821-4510-a134-6183fd5cb0a7 service nova] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Received event network-vif-deleted-2ca66e03-30d5-4fcf-b92c-0f5ea8e7ac07 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1037.837242] env[61629]: INFO nova.compute.manager [req-1264f01b-8fba-4dcd-b8c1-a51e5788581a req-ac08a5b0-c821-4510-a134-6183fd5cb0a7 service nova] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Neutron deleted interface 2ca66e03-30d5-4fcf-b92c-0f5ea8e7ac07; detaching it from the instance and deleting it from the info cache [ 1037.837513] env[61629]: DEBUG nova.network.neutron [req-1264f01b-8fba-4dcd-b8c1-a51e5788581a req-ac08a5b0-c821-4510-a134-6183fd5cb0a7 service nova] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.896900] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5186624e-d908-4cc4-bb18-95ae00271a46 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.905108] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eea4f09-6120-4edb-8cf0-e6cd78bf68e0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.935438] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d75e9dd-5c3a-43f2-a266-2d46a0c784af {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.942852] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886e5baf-70d1-48e0-9fd6-faecbd6840ee {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.955943] env[61629]: DEBUG nova.compute.provider_tree [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1038.087561] env[61629]: DEBUG 
oslo_concurrency.lockutils [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "88cf532c-b13b-4c27-8637-d24bb6d73b82" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.087803] env[61629]: DEBUG oslo_concurrency.lockutils [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "88cf532c-b13b-4c27-8637-d24bb6d73b82" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.189788] env[61629]: DEBUG nova.network.neutron [-] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.340415] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-78ad9f28-a5f0-4609-b83a-05d84fb93f3e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.350016] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc011161-f127-41a3-98cd-e6ea3777cc4e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.374285] env[61629]: DEBUG nova.compute.manager [req-1264f01b-8fba-4dcd-b8c1-a51e5788581a req-ac08a5b0-c821-4510-a134-6183fd5cb0a7 service nova] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Detach interface failed, port_id=2ca66e03-30d5-4fcf-b92c-0f5ea8e7ac07, reason: Instance 3b116c59-a904-4b68-9c74-58954b3de240 could not be found. {{(pid=61629) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1038.459489] env[61629]: DEBUG nova.scheduler.client.report [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1038.590403] env[61629]: DEBUG nova.compute.manager [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1038.692491] env[61629]: INFO nova.compute.manager [-] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Took 1.31 seconds to deallocate network for instance. 
[ 1038.964544] env[61629]: DEBUG oslo_concurrency.lockutils [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.665s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1038.985788] env[61629]: INFO nova.scheduler.client.report [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Deleted allocations for instance 459c5f25-8fb1-4e43-8f7f-359a7ff697f2 [ 1039.109093] env[61629]: DEBUG oslo_concurrency.lockutils [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.109093] env[61629]: DEBUG oslo_concurrency.lockutils [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.110152] env[61629]: INFO nova.compute.claims [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1039.198641] env[61629]: DEBUG oslo_concurrency.lockutils [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.498103] env[61629]: DEBUG oslo_concurrency.lockutils [None req-642b6081-34bc-4f1d-9d70-a84eff97f6be tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "459c5f25-8fb1-4e43-8f7f-359a7ff697f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.392s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.531441] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.531730] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s 
{{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.531938] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.532145] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.532325] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.534401] env[61629]: INFO nova.compute.manager [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Terminating instance [ 1039.536132] env[61629]: DEBUG nova.compute.manager [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1039.536336] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1039.537185] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84517711-519c-4904-981f-13d08e9d15ad {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.545062] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1039.545334] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea8d6550-3461-4a55-aa11-b54318ba7977 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.550963] env[61629]: DEBUG oslo_vmware.api [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 1039.550963] env[61629]: value = "task-1354602" [ 1039.550963] env[61629]: _type = "Task" [ 1039.550963] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.557900] env[61629]: DEBUG oslo_vmware.api [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354602, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.061299] env[61629]: DEBUG oslo_vmware.api [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354602, 'name': PowerOffVM_Task, 'duration_secs': 0.232413} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.061675] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1040.061809] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1040.062376] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f10c13f4-d18d-4410-a0a1-69987aaf8c4f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.124027] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1040.124280] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1040.124471] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Deleting the datastore file [datastore1] c5b6f6b8-587c-4b74-bc83-98dac319b15b {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1040.124742] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3e082e79-a9be-40ed-a873-1f55c71b561b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.130574] env[61629]: DEBUG oslo_vmware.api [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for the task: (returnval){ [ 1040.130574] env[61629]: value = "task-1354604" [ 1040.130574] env[61629]: _type = "Task" [ 1040.130574] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.138016] env[61629]: DEBUG oslo_vmware.api [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354604, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.205291] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4fbe3d7-0f3b-4c80-a282-5acd1407e82b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.212650] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e4129f-2278-4725-8c40-770edc9b36f4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.241761] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a79a27-4827-46ff-8aac-a3998a3be52b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.244360] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1040.244593] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1040.244741] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Starting heal instance info cache {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1040.251276] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-455a8114-a4fd-4ab6-a483-2a30a378cf7e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.264407] env[61629]: DEBUG nova.compute.provider_tree [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.639704] env[61629]: DEBUG oslo_vmware.api [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Task: {'id': task-1354604, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14432} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.639981] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1040.640223] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1040.640421] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1040.640608] env[61629]: INFO nova.compute.manager [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1040.640854] env[61629]: DEBUG oslo.service.loopingcall [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1040.641070] env[61629]: DEBUG nova.compute.manager [-] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1040.641173] env[61629]: DEBUG nova.network.neutron [-] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1040.767668] env[61629]: DEBUG nova.scheduler.client.report [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1040.894524] env[61629]: DEBUG nova.compute.manager [req-46e3e6bf-cd33-43df-8a15-33ed5ba4b1d2 req-3589b412-37a0-412f-9cc9-1b3cd35f2ddc service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Received event network-vif-deleted-57805f12-9b81-4485-8f3a-32567ed40a8c {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1040.894771] env[61629]: INFO nova.compute.manager [req-46e3e6bf-cd33-43df-8a15-33ed5ba4b1d2 req-3589b412-37a0-412f-9cc9-1b3cd35f2ddc service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Neutron deleted interface 57805f12-9b81-4485-8f3a-32567ed40a8c; detaching it from the instance and deleting it from the info cache [ 1040.894951] env[61629]: DEBUG nova.network.neutron [req-46e3e6bf-cd33-43df-8a15-33ed5ba4b1d2 req-3589b412-37a0-412f-9cc9-1b3cd35f2ddc service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.272484] env[61629]: DEBUG oslo_concurrency.lockutils [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.164s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.273012] env[61629]: DEBUG nova.compute.manager [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1041.276539] env[61629]: DEBUG oslo_concurrency.lockutils [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.078s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.276773] env[61629]: DEBUG nova.objects.instance [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lazy-loading 'resources' on Instance uuid 3b116c59-a904-4b68-9c74-58954b3de240 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1041.373625] env[61629]: DEBUG nova.network.neutron [-] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.396855] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d3c0a588-32c9-469f-8912-e4a1597dd3d5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.406396] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b86cfb7e-9878-47a1-b704-f60678f960c3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.429225] env[61629]: DEBUG nova.compute.manager [req-46e3e6bf-cd33-43df-8a15-33ed5ba4b1d2 req-3589b412-37a0-412f-9cc9-1b3cd35f2ddc service nova] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Detach interface failed, port_id=57805f12-9b81-4485-8f3a-32567ed40a8c, reason: Instance c5b6f6b8-587c-4b74-bc83-98dac319b15b could not be found. {{(pid=61629) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1041.752770] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Didn't find any instances for network info cache update. 
{{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1041.753077] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.753254] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.753428] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.753600] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.753752] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.753916] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.754063] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61629) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1041.754235] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager.update_available_resource {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.778406] env[61629]: DEBUG nova.compute.utils [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1041.782645] env[61629]: DEBUG nova.compute.manager [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1041.782831] env[61629]: DEBUG nova.network.neutron [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1041.834043] env[61629]: DEBUG nova.policy [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c62f9a7c8b5f4ef985880339407b46a1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0578ce75c37942d4ba6c8b862ceb7d92', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 1041.867161] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4efcaba0-45fb-482b-b516-4c39d732afba {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.874790] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d8c2fc-c95a-47b6-a138-464bb24febf7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.878037] env[61629]: INFO nova.compute.manager [-] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Took 1.24 seconds to deallocate network for instance. 
[ 1041.908508] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaebe80e-fc61-47b7-a98e-c8730ca3bc8e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.916300] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2fd0848-a7b5-4643-9caa-98d2bb9213c7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.930100] env[61629]: DEBUG nova.compute.provider_tree [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1042.251790] env[61629]: DEBUG nova.network.neutron [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Successfully created port: c6cc50f1-033b-4f53-924c-25fa657edba9 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1042.256826] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.285647] env[61629]: DEBUG nova.compute.manager [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1042.409721] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.432896] env[61629]: DEBUG nova.scheduler.client.report [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1042.937737] env[61629]: DEBUG oslo_concurrency.lockutils [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.661s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.940895] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.683s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.940895] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.940895] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61629) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1042.940895] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.531s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.941356] env[61629]: DEBUG nova.objects.instance [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lazy-loading 'resources' on Instance uuid c5b6f6b8-587c-4b74-bc83-98dac319b15b {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1042.942731] env[61629]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb4a2b1-b859-410d-b938-f9433e3a8b66 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.951145] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7796589e-1a6a-4bef-878c-402e759da095 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.965843] env[61629]: INFO nova.scheduler.client.report [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Deleted allocations for instance 3b116c59-a904-4b68-9c74-58954b3de240 [ 1042.967143] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8850b77-2c41-4db2-b644-4bdc4858ccc6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.976046] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a945aa-7e75-4f16-9fd7-d665c36bb02f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.006574] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180466MB free_disk=151GB free_vcpus=48 pci_devices=None {{(pid=61629) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1043.006574] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.298198] env[61629]: DEBUG nova.compute.manager [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1043.323766] env[61629]: DEBUG nova.virt.hardware [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1043.324046] env[61629]: DEBUG nova.virt.hardware [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1043.324214] env[61629]: DEBUG nova.virt.hardware [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1043.324467] env[61629]: DEBUG nova.virt.hardware [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1043.324645] env[61629]: DEBUG nova.virt.hardware [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1043.324795] env[61629]: DEBUG nova.virt.hardware [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1043.325013] env[61629]: DEBUG nova.virt.hardware [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1043.325191] env[61629]: DEBUG nova.virt.hardware [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1043.325362] env[61629]: DEBUG nova.virt.hardware [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 
tempest-ServersTestJSON-1460186850-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1043.325528] env[61629]: DEBUG nova.virt.hardware [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1043.325701] env[61629]: DEBUG nova.virt.hardware [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1043.326564] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-326e9dff-8b12-4dd2-81f6-ab1d71df87ba {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.334193] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a3c949-2c6d-48e8-a740-a5f0336a3b4f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.476275] env[61629]: DEBUG oslo_concurrency.lockutils [None req-fc1337b2-55fb-45cd-8a33-5d60242a12fe tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "3b116c59-a904-4b68-9c74-58954b3de240" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.680s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.514984] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492276de-f475-4594-95b9-575ee6c82560 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.522703] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c24366e-d2a6-41a4-8a5f-d60049a371ca {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.552754] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-721ac740-cd9f-416c-8be2-c7746878cf3f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.559880] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee41c00-79be-478c-9c69-1304d0b5dddc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.572769] env[61629]: DEBUG nova.compute.provider_tree [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1043.631125] env[61629]: DEBUG nova.compute.manager [req-0074e5ff-ef6d-4b2e-b78e-1f96816a095f req-99b3e493-b80b-4e02-b902-4275389e6b7e service nova] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Received event 
network-vif-plugged-c6cc50f1-033b-4f53-924c-25fa657edba9 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1043.631423] env[61629]: DEBUG oslo_concurrency.lockutils [req-0074e5ff-ef6d-4b2e-b78e-1f96816a095f req-99b3e493-b80b-4e02-b902-4275389e6b7e service nova] Acquiring lock "88cf532c-b13b-4c27-8637-d24bb6d73b82-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.631581] env[61629]: DEBUG oslo_concurrency.lockutils [req-0074e5ff-ef6d-4b2e-b78e-1f96816a095f req-99b3e493-b80b-4e02-b902-4275389e6b7e service nova] Lock "88cf532c-b13b-4c27-8637-d24bb6d73b82-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.631750] env[61629]: DEBUG oslo_concurrency.lockutils [req-0074e5ff-ef6d-4b2e-b78e-1f96816a095f req-99b3e493-b80b-4e02-b902-4275389e6b7e service nova] Lock "88cf532c-b13b-4c27-8637-d24bb6d73b82-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.632080] env[61629]: DEBUG nova.compute.manager [req-0074e5ff-ef6d-4b2e-b78e-1f96816a095f req-99b3e493-b80b-4e02-b902-4275389e6b7e service nova] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] No waiting events found dispatching network-vif-plugged-c6cc50f1-033b-4f53-924c-25fa657edba9 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1043.632147] env[61629]: WARNING nova.compute.manager [req-0074e5ff-ef6d-4b2e-b78e-1f96816a095f req-99b3e493-b80b-4e02-b902-4275389e6b7e service nova] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Received unexpected event network-vif-plugged-c6cc50f1-033b-4f53-924c-25fa657edba9 for instance with vm_state building and task_state spawning. 
[ 1043.719018] env[61629]: DEBUG nova.network.neutron [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Successfully updated port: c6cc50f1-033b-4f53-924c-25fa657edba9 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1044.075787] env[61629]: DEBUG nova.scheduler.client.report [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1044.207669] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "0d21b352-bdd0-4887-8658-cd5c448352d2" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.207984] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "0d21b352-bdd0-4887-8658-cd5c448352d2" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.208265] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "0d21b352-bdd0-4887-8658-cd5c448352d2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.208472] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "0d21b352-bdd0-4887-8658-cd5c448352d2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.208658] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "0d21b352-bdd0-4887-8658-cd5c448352d2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.210856] env[61629]: INFO nova.compute.manager 
[None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Terminating instance [ 1044.212697] env[61629]: DEBUG nova.compute.manager [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1044.212850] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1044.213695] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de1bb4c-ccb8-4e3d-8516-780591e97fa6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.221651] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1044.222145] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-afcd09ca-1a0e-4701-80e3-83d7e6f0b896 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.223811] env[61629]: DEBUG oslo_concurrency.lockutils [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "refresh_cache-88cf532c-b13b-4c27-8637-d24bb6d73b82" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.223938] env[61629]: DEBUG oslo_concurrency.lockutils [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired lock "refresh_cache-88cf532c-b13b-4c27-8637-d24bb6d73b82" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.224089] env[61629]: DEBUG nova.network.neutron [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1044.229697] env[61629]: DEBUG oslo_vmware.api [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 1044.229697] env[61629]: value = "task-1354605" [ 1044.229697] env[61629]: _type = "Task" [ 1044.229697] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.238535] env[61629]: DEBUG oslo_vmware.api [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354605, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.580966] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.640s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.583323] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 1.577s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.598259] env[61629]: INFO nova.scheduler.client.report [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Deleted allocations for instance c5b6f6b8-587c-4b74-bc83-98dac319b15b [ 1044.739169] env[61629]: DEBUG oslo_vmware.api [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354605, 'name': PowerOffVM_Task, 'duration_secs': 0.220936} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.739497] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1044.739707] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1044.739967] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d04f9f54-8bf0-42ef-b74a-55223d28acba {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.757319] env[61629]: DEBUG nova.network.neutron [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1044.804381] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1044.804616] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1044.804805] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Deleting the datastore file [datastore2] 0d21b352-bdd0-4887-8658-cd5c448352d2 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1044.805087] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46eb8bdb-5268-4618-b9a9-f76ec4fe4878 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.811674] env[61629]: DEBUG oslo_vmware.api [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 1044.811674] env[61629]: value = "task-1354607" [ 1044.811674] env[61629]: _type = "Task" [ 1044.811674] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.821534] env[61629]: DEBUG oslo_vmware.api [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354607, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.841807] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "843e230d-fb7e-4375-83f4-78fe6d9de9ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.842063] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "843e230d-fb7e-4375-83f4-78fe6d9de9ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.914190] env[61629]: DEBUG nova.network.neutron [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Updating instance_info_cache with network_info: [{"id": "c6cc50f1-033b-4f53-924c-25fa657edba9", "address": "fa:16:3e:2d:cc:97", "network": {"id": "c1b68401-68d1-48c7-b118-722070249876", "bridge": "br-int", "label": "tempest-ServersTestJSON-1738845000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0578ce75c37942d4ba6c8b862ceb7d92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6cc50f1-03", "ovs_interfaceid": "c6cc50f1-033b-4f53-924c-25fa657edba9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.105348] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b93eda6b-fd32-4a87-92fd-8edc8831df19 tempest-ServersNegativeTestJSON-94043772 tempest-ServersNegativeTestJSON-94043772-project-member] Lock "c5b6f6b8-587c-4b74-bc83-98dac319b15b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.573s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.322128] env[61629]: DEBUG oslo_vmware.api [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354607, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.222522} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.322360] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1045.322572] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1045.323723] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1045.323723] env[61629]: INFO nova.compute.manager [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1045.323723] env[61629]: DEBUG oslo.service.loopingcall [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1045.323723] env[61629]: DEBUG nova.compute.manager [-] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1045.323723] env[61629]: DEBUG nova.network.neutron [-] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1045.344029] env[61629]: DEBUG nova.compute.manager [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Starting instance... 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1045.389384] env[61629]: DEBUG nova.objects.instance [None req-b66cf585-3078-4e26-8b6e-4bb732013935 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Lazy-loading 'flavor' on Instance uuid 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1045.416866] env[61629]: DEBUG oslo_concurrency.lockutils [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Releasing lock "refresh_cache-88cf532c-b13b-4c27-8637-d24bb6d73b82" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1045.417217] env[61629]: DEBUG nova.compute.manager [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Instance network_info: |[{"id": "c6cc50f1-033b-4f53-924c-25fa657edba9", "address": "fa:16:3e:2d:cc:97", "network": {"id": "c1b68401-68d1-48c7-b118-722070249876", "bridge": "br-int", "label": "tempest-ServersTestJSON-1738845000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0578ce75c37942d4ba6c8b862ceb7d92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6cc50f1-03", "ovs_interfaceid": "c6cc50f1-033b-4f53-924c-25fa657edba9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1045.417668] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:cc:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba866c99-1cb2-4588-9f76-4bc0421ed46a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c6cc50f1-033b-4f53-924c-25fa657edba9', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1045.425309] env[61629]: DEBUG oslo.service.loopingcall [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1045.426067] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1045.426310] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac26a6f6-f8f5-4b45-ba58-7582830625e9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.446708] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1045.446708] env[61629]: value = "task-1354608" [ 1045.446708] env[61629]: _type = "Task" [ 1045.446708] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.454195] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354608, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.610058] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 87a1383f-d66b-4bde-b153-89ac62ff8390 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1045.610367] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 0d21b352-bdd0-4887-8658-cd5c448352d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1045.610367] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1045.610470] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 88cf532c-b13b-4c27-8637-d24bb6d73b82 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1045.659071] env[61629]: DEBUG nova.compute.manager [req-24956a19-8c30-42ec-b637-6f09c8bc0922 req-8dc8a5be-c6b0-4ba5-a654-ee5a68e21bfc service nova] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Received event network-changed-c6cc50f1-033b-4f53-924c-25fa657edba9 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1045.659271] env[61629]: DEBUG nova.compute.manager [req-24956a19-8c30-42ec-b637-6f09c8bc0922 req-8dc8a5be-c6b0-4ba5-a654-ee5a68e21bfc service nova] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Refreshing instance network info cache due to event network-changed-c6cc50f1-033b-4f53-924c-25fa657edba9. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1045.659569] env[61629]: DEBUG oslo_concurrency.lockutils [req-24956a19-8c30-42ec-b637-6f09c8bc0922 req-8dc8a5be-c6b0-4ba5-a654-ee5a68e21bfc service nova] Acquiring lock "refresh_cache-88cf532c-b13b-4c27-8637-d24bb6d73b82" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.659809] env[61629]: DEBUG oslo_concurrency.lockutils [req-24956a19-8c30-42ec-b637-6f09c8bc0922 req-8dc8a5be-c6b0-4ba5-a654-ee5a68e21bfc service nova] Acquired lock "refresh_cache-88cf532c-b13b-4c27-8637-d24bb6d73b82" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.660090] env[61629]: DEBUG nova.network.neutron [req-24956a19-8c30-42ec-b637-6f09c8bc0922 req-8dc8a5be-c6b0-4ba5-a654-ee5a68e21bfc service nova] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Refreshing network info cache for port c6cc50f1-033b-4f53-924c-25fa657edba9 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1045.866099] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.896105] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b66cf585-3078-4e26-8b6e-4bb732013935 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Acquiring lock "refresh_cache-9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.896105] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b66cf585-3078-4e26-8b6e-4bb732013935 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Acquired lock "refresh_cache-9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.956790] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354608, 'name': CreateVM_Task, 'duration_secs': 0.294921} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.956966] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1045.957793] env[61629]: DEBUG oslo_concurrency.lockutils [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.957981] env[61629]: DEBUG oslo_concurrency.lockutils [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.958349] env[61629]: DEBUG oslo_concurrency.lockutils [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1045.958607] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c768429-ae3b-453c-ae94-b03937dc0cdc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.963492] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1045.963492] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52d44186-a542-f11f-781a-5948fe87f5ec" [ 1045.963492] env[61629]: _type = "Task" [ 1045.963492] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.972372] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52d44186-a542-f11f-781a-5948fe87f5ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.060369] env[61629]: DEBUG nova.network.neutron [-] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.113245] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance 843e230d-fb7e-4375-83f4-78fe6d9de9ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.113438] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=61629) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1046.113574] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=61629) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1046.193317] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d0ba358-0c12-4627-85bf-625ae4970806 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.200932] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ee3aa9-ffd0-40de-9418-dedbb6769596 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.230884] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f6d074-a852-411c-9db2-f583064e4cf5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.240341] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-132d0538-f31b-4f6c-9139-5ec53737816d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.253319] env[61629]: DEBUG nova.compute.provider_tree [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1046.357214] env[61629]: DEBUG nova.network.neutron [None req-b66cf585-3078-4e26-8b6e-4bb732013935 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1046.475777] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52d44186-a542-f11f-781a-5948fe87f5ec, 'name': SearchDatastore_Task, 'duration_secs': 0.009399} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.476670] env[61629]: DEBUG nova.network.neutron [req-24956a19-8c30-42ec-b637-6f09c8bc0922 req-8dc8a5be-c6b0-4ba5-a654-ee5a68e21bfc service nova] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Updated VIF entry in instance network info cache for port c6cc50f1-033b-4f53-924c-25fa657edba9. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1046.477013] env[61629]: DEBUG nova.network.neutron [req-24956a19-8c30-42ec-b637-6f09c8bc0922 req-8dc8a5be-c6b0-4ba5-a654-ee5a68e21bfc service nova] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Updating instance_info_cache with network_info: [{"id": "c6cc50f1-033b-4f53-924c-25fa657edba9", "address": "fa:16:3e:2d:cc:97", "network": {"id": "c1b68401-68d1-48c7-b118-722070249876", "bridge": "br-int", "label": "tempest-ServersTestJSON-1738845000-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0578ce75c37942d4ba6c8b862ceb7d92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6cc50f1-03", "ovs_interfaceid": "c6cc50f1-033b-4f53-924c-25fa657edba9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.478329] env[61629]: DEBUG oslo_concurrency.lockutils [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.482110] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1046.482110] env[61629]: DEBUG oslo_concurrency.lockutils [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.482110] env[61629]: DEBUG oslo_concurrency.lockutils [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.482110] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1046.482110] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-04bc7d36-f72c-4a09-b24a-08cae857cdd4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.489928] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1046.490136] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1046.490911] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-deb3ed17-0ba2-4341-8437-2c56580b63bf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.498150] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1046.498150] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52a0cdba-1d41-044c-31df-a37dc1dbc488" [ 1046.498150] env[61629]: _type = "Task" [ 1046.498150] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.505384] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52a0cdba-1d41-044c-31df-a37dc1dbc488, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.563353] env[61629]: INFO nova.compute.manager [-] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Took 1.24 seconds to deallocate network for instance. 
[ 1046.756953] env[61629]: DEBUG nova.scheduler.client.report [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1046.980561] env[61629]: DEBUG oslo_concurrency.lockutils [req-24956a19-8c30-42ec-b637-6f09c8bc0922 req-8dc8a5be-c6b0-4ba5-a654-ee5a68e21bfc service nova] Releasing lock "refresh_cache-88cf532c-b13b-4c27-8637-d24bb6d73b82" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.980982] env[61629]: DEBUG nova.compute.manager [req-24956a19-8c30-42ec-b637-6f09c8bc0922 req-8dc8a5be-c6b0-4ba5-a654-ee5a68e21bfc service nova] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Received event network-vif-deleted-91e4e033-337e-4a36-a5a7-a54b29cc6531 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1046.981345] env[61629]: INFO nova.compute.manager [req-24956a19-8c30-42ec-b637-6f09c8bc0922 req-8dc8a5be-c6b0-4ba5-a654-ee5a68e21bfc service nova] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Neutron deleted interface 91e4e033-337e-4a36-a5a7-a54b29cc6531; detaching it from the instance and deleting it from the info cache [ 1046.981499] env[61629]: DEBUG nova.network.neutron [req-24956a19-8c30-42ec-b637-6f09c8bc0922 req-8dc8a5be-c6b0-4ba5-a654-ee5a68e21bfc service nova] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.011117] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52a0cdba-1d41-044c-31df-a37dc1dbc488, 'name': SearchDatastore_Task, 'duration_secs': 0.008393} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.011961] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-becf9887-1415-48e1-b958-1d72d0aa2197 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.017327] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1047.017327] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]527a8e75-86fb-b3ef-9a4f-2e693869e427" [ 1047.017327] env[61629]: _type = "Task" [ 1047.017327] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.025376] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]527a8e75-86fb-b3ef-9a4f-2e693869e427, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.069947] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.114327] env[61629]: DEBUG nova.network.neutron [None req-b66cf585-3078-4e26-8b6e-4bb732013935 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Updating instance_info_cache with network_info: [{"id": "de2e75fd-8c5a-4959-ac73-80b64539caa3", "address": "fa:16:3e:57:79:05", "network": {"id": "93a87623-e06d-4557-b118-c5170f70390e", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-948282834-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92d74dfdbfa74614b9950031e913799d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "423047aa-c430-4593-a76c-9982c15c81cf", "external-id": "nsx-vlan-transportzone-262", "segmentation_id": 262, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde2e75fd-8c", "ovs_interfaceid": "de2e75fd-8c5a-4959-ac73-80b64539caa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.261828] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61629) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1047.262070] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.679s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.262380] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 
tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.396s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.264386] env[61629]: INFO nova.compute.claims [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1047.484128] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0361a27a-df02-4e60-8015-86fd66eb9d04 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.494244] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76bce0cc-6207-4379-b0f0-a7023da808e4 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.521380] env[61629]: DEBUG nova.compute.manager [req-24956a19-8c30-42ec-b637-6f09c8bc0922 req-8dc8a5be-c6b0-4ba5-a654-ee5a68e21bfc service nova] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Detach interface failed, port_id=91e4e033-337e-4a36-a5a7-a54b29cc6531, reason: Instance 0d21b352-bdd0-4887-8658-cd5c448352d2 could not be found. {{(pid=61629) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1047.529532] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]527a8e75-86fb-b3ef-9a4f-2e693869e427, 'name': SearchDatastore_Task, 'duration_secs': 0.009411} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.529790] env[61629]: DEBUG oslo_concurrency.lockutils [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.530065] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 88cf532c-b13b-4c27-8637-d24bb6d73b82/88cf532c-b13b-4c27-8637-d24bb6d73b82.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1047.530344] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-82007e7d-dad4-45d3-9b8d-c091ab7ba16a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.536138] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1047.536138] env[61629]: value = "task-1354609" [ 1047.536138] env[61629]: _type = "Task" [ 1047.536138] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.543135] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354609, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.600105] env[61629]: DEBUG nova.objects.instance [None req-e8f4df3b-99a8-4647-a352-dd7888f31486 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Lazy-loading 'flavor' on Instance uuid 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1047.619098] env[61629]: DEBUG oslo_concurrency.lockutils [None req-b66cf585-3078-4e26-8b6e-4bb732013935 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Releasing lock "refresh_cache-9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.619442] env[61629]: DEBUG nova.compute.manager [None req-b66cf585-3078-4e26-8b6e-4bb732013935 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Inject network info {{(pid=61629) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7219}} [ 1047.619960] env[61629]: DEBUG nova.compute.manager [None req-b66cf585-3078-4e26-8b6e-4bb732013935 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] network_info to inject: |[{"id": "de2e75fd-8c5a-4959-ac73-80b64539caa3", "address": "fa:16:3e:57:79:05", "network": {"id": "93a87623-e06d-4557-b118-c5170f70390e", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-948282834-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92d74dfdbfa74614b9950031e913799d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "423047aa-c430-4593-a76c-9982c15c81cf", "external-id": "nsx-vlan-transportzone-262", "segmentation_id": 262, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde2e75fd-8c", "ovs_interfaceid": "de2e75fd-8c5a-4959-ac73-80b64539caa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1047.624683] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b66cf585-3078-4e26-8b6e-4bb732013935 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Reconfiguring VM instance to set the machine id {{(pid=61629) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1047.625425] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09d645da-f953-4898-bdeb-0e10508a3909 {{(pid=61629) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.642676] env[61629]: DEBUG oslo_vmware.api [None req-b66cf585-3078-4e26-8b6e-4bb732013935 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Waiting for the task: (returnval){ [ 1047.642676] env[61629]: value = "task-1354610" [ 1047.642676] env[61629]: _type = "Task" [ 1047.642676] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.650596] env[61629]: DEBUG oslo_vmware.api [None req-b66cf585-3078-4e26-8b6e-4bb732013935 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': task-1354610, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.685985] env[61629]: DEBUG nova.compute.manager [req-c91c1475-e01e-454b-b3e3-108978043025 req-05dfe1ac-6ed3-4e57-9761-521645ed63c6 service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Received event network-changed-de2e75fd-8c5a-4959-ac73-80b64539caa3 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1047.686203] env[61629]: DEBUG nova.compute.manager [req-c91c1475-e01e-454b-b3e3-108978043025 req-05dfe1ac-6ed3-4e57-9761-521645ed63c6 service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Refreshing instance network info cache due to event network-changed-de2e75fd-8c5a-4959-ac73-80b64539caa3. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1047.686437] env[61629]: DEBUG oslo_concurrency.lockutils [req-c91c1475-e01e-454b-b3e3-108978043025 req-05dfe1ac-6ed3-4e57-9761-521645ed63c6 service nova] Acquiring lock "refresh_cache-9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1047.686592] env[61629]: DEBUG oslo_concurrency.lockutils [req-c91c1475-e01e-454b-b3e3-108978043025 req-05dfe1ac-6ed3-4e57-9761-521645ed63c6 service nova] Acquired lock "refresh_cache-9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.686760] env[61629]: DEBUG nova.network.neutron [req-c91c1475-e01e-454b-b3e3-108978043025 req-05dfe1ac-6ed3-4e57-9761-521645ed63c6 service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Refreshing network info cache for port de2e75fd-8c5a-4959-ac73-80b64539caa3 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1048.046258] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354609, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469337} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.046548] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore1] 88cf532c-b13b-4c27-8637-d24bb6d73b82/88cf532c-b13b-4c27-8637-d24bb6d73b82.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1048.046771] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1048.047032] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e7be04b6-f317-46ce-b413-37787c34075a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.052807] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1048.052807] env[61629]: value = "task-1354611" [ 1048.052807] env[61629]: _type = "Task" [ 1048.052807] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.060638] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354611, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.104534] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e8f4df3b-99a8-4647-a352-dd7888f31486 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Acquiring lock "refresh_cache-9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1048.153268] env[61629]: DEBUG oslo_vmware.api [None req-b66cf585-3078-4e26-8b6e-4bb732013935 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': task-1354610, 'name': ReconfigVM_Task, 'duration_secs': 0.206724} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.154020] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-b66cf585-3078-4e26-8b6e-4bb732013935 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Reconfigured VM instance to set the machine id {{(pid=61629) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1048.349586] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a515f8-7b0d-4e4a-b9e8-396d7b49a629 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.357230] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33539665-ee41-4f2d-ac7b-a64a896713b3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.388576] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd716861-ca67-49be-9ee6-b0e21d7c7e9e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.396080] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11619b71-65e6-453e-b9ec-127a5d89fd68 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.410251] env[61629]: DEBUG nova.compute.provider_tree [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1048.471407] env[61629]: DEBUG nova.network.neutron [req-c91c1475-e01e-454b-b3e3-108978043025 req-05dfe1ac-6ed3-4e57-9761-521645ed63c6 service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Updated VIF entry in instance network info cache for port de2e75fd-8c5a-4959-ac73-80b64539caa3. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1048.471808] env[61629]: DEBUG nova.network.neutron [req-c91c1475-e01e-454b-b3e3-108978043025 req-05dfe1ac-6ed3-4e57-9761-521645ed63c6 service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Updating instance_info_cache with network_info: [{"id": "de2e75fd-8c5a-4959-ac73-80b64539caa3", "address": "fa:16:3e:57:79:05", "network": {"id": "93a87623-e06d-4557-b118-c5170f70390e", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-948282834-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92d74dfdbfa74614b9950031e913799d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "423047aa-c430-4593-a76c-9982c15c81cf", "external-id": "nsx-vlan-transportzone-262", "segmentation_id": 262, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde2e75fd-8c", "ovs_interfaceid": "de2e75fd-8c5a-4959-ac73-80b64539caa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.562901] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354611, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.057988} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.563036] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1048.563847] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a07a8c-cc38-4e67-9ac1-a9423cb7358e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.585664] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 88cf532c-b13b-4c27-8637-d24bb6d73b82/88cf532c-b13b-4c27-8637-d24bb6d73b82.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1048.585909] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1614196f-59ac-4f97-b64c-31350b51add1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.604068] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1048.604068] env[61629]: value = "task-1354612" [ 1048.604068] env[61629]: _type = "Task" [ 1048.604068] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.611923] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354612, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.913435] env[61629]: DEBUG nova.scheduler.client.report [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1048.974224] env[61629]: DEBUG oslo_concurrency.lockutils [req-c91c1475-e01e-454b-b3e3-108978043025 req-05dfe1ac-6ed3-4e57-9761-521645ed63c6 service nova] Releasing lock "refresh_cache-9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.974599] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e8f4df3b-99a8-4647-a352-dd7888f31486 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Acquired lock "refresh_cache-9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.114667] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354612, 'name': ReconfigVM_Task, 'duration_secs': 0.25595} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.114985] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 88cf532c-b13b-4c27-8637-d24bb6d73b82/88cf532c-b13b-4c27-8637-d24bb6d73b82.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1049.115889] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-70e2a772-af58-49e0-b52b-6acd93276dbf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.122474] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1049.122474] env[61629]: value = "task-1354613" [ 1049.122474] env[61629]: _type = "Task" [ 1049.122474] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.129891] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354613, 'name': Rename_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.284151] env[61629]: DEBUG nova.network.neutron [None req-e8f4df3b-99a8-4647-a352-dd7888f31486 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1049.418441] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.156s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.418982] env[61629]: DEBUG nova.compute.manager [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1049.421494] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.352s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.421717] env[61629]: DEBUG nova.objects.instance [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lazy-loading 'resources' on Instance uuid 0d21b352-bdd0-4887-8658-cd5c448352d2 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1049.633023] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354613, 'name': Rename_Task, 'duration_secs': 0.144869} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.633023] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1049.633023] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8815e8cc-6c23-4530-b1f5-722c2a54924c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.638877] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1049.638877] env[61629]: value = "task-1354614" [ 1049.638877] env[61629]: _type = "Task" [ 1049.638877] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.645911] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354614, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.711845] env[61629]: DEBUG nova.compute.manager [req-65b28b19-d781-489d-b8d2-05e083080926 req-d1e0a360-ecee-4cca-9fac-aeb5b95d0413 service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Received event network-changed-de2e75fd-8c5a-4959-ac73-80b64539caa3 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1049.712064] env[61629]: DEBUG nova.compute.manager [req-65b28b19-d781-489d-b8d2-05e083080926 req-d1e0a360-ecee-4cca-9fac-aeb5b95d0413 service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Refreshing instance network info cache due to event network-changed-de2e75fd-8c5a-4959-ac73-80b64539caa3. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1049.712268] env[61629]: DEBUG oslo_concurrency.lockutils [req-65b28b19-d781-489d-b8d2-05e083080926 req-d1e0a360-ecee-4cca-9fac-aeb5b95d0413 service nova] Acquiring lock "refresh_cache-9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1049.924743] env[61629]: DEBUG nova.compute.utils [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1049.929175] env[61629]: DEBUG nova.compute.manager [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1049.929396] env[61629]: DEBUG nova.network.neutron [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1049.998081] env[61629]: DEBUG nova.policy [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec39705b9dd24915a0b3723ea45a85d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38efdd2cc07f45a49fb06d590aafb96b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 1050.004499] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb27ec2-7214-48b8-a36a-f4dfb69e1916 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.012157] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f990fb5-456a-4205-a7d8-5d9493c2b10e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.046046] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d8d532-9a18-48ce-b76f-d24cf5182361 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.054843] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7981f7a0-bf08-4c0e-b41f-4f9413ea64bc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.071326] env[61629]: DEBUG nova.compute.provider_tree [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1050.111296] env[61629]: DEBUG nova.network.neutron [None req-e8f4df3b-99a8-4647-a352-dd7888f31486 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Updating instance_info_cache with network_info: [{"id": "de2e75fd-8c5a-4959-ac73-80b64539caa3", "address": "fa:16:3e:57:79:05", "network": {"id": "93a87623-e06d-4557-b118-c5170f70390e", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-948282834-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92d74dfdbfa74614b9950031e913799d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "423047aa-c430-4593-a76c-9982c15c81cf", "external-id": "nsx-vlan-transportzone-262", "segmentation_id": 262, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde2e75fd-8c", "ovs_interfaceid": "de2e75fd-8c5a-4959-ac73-80b64539caa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.148416] env[61629]: DEBUG oslo_vmware.api [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354614, 'name': PowerOnVM_Task, 'duration_secs': 0.466024} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.148734] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1050.148884] env[61629]: INFO nova.compute.manager [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Took 6.85 seconds to spawn the instance on the hypervisor. [ 1050.149083] env[61629]: DEBUG nova.compute.manager [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1050.149882] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb06e479-4ac4-40da-befd-105f074ac3dc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.279737] env[61629]: DEBUG nova.network.neutron [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Successfully created port: 4f1807c4-68f3-46c7-939e-507baf0b7c79 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1050.430375] env[61629]: DEBUG nova.compute.manager [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1050.575491] env[61629]: DEBUG nova.scheduler.client.report [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1050.613804] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e8f4df3b-99a8-4647-a352-dd7888f31486 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Releasing lock "refresh_cache-9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1050.614094] env[61629]: DEBUG nova.compute.manager [None req-e8f4df3b-99a8-4647-a352-dd7888f31486 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Inject network info {{(pid=61629) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7219}} [ 1050.614382] env[61629]: DEBUG nova.compute.manager [None req-e8f4df3b-99a8-4647-a352-dd7888f31486 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] network_info to inject: |[{"id": "de2e75fd-8c5a-4959-ac73-80b64539caa3", "address": "fa:16:3e:57:79:05", "network": {"id": "93a87623-e06d-4557-b118-c5170f70390e", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-948282834-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92d74dfdbfa74614b9950031e913799d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "423047aa-c430-4593-a76c-9982c15c81cf", "external-id": "nsx-vlan-transportzone-262", "segmentation_id": 262, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde2e75fd-8c", "ovs_interfaceid": "de2e75fd-8c5a-4959-ac73-80b64539caa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1050.619065] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e8f4df3b-99a8-4647-a352-dd7888f31486 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Reconfiguring VM instance to set the machine 
id {{(pid=61629) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1050.619441] env[61629]: DEBUG oslo_concurrency.lockutils [req-65b28b19-d781-489d-b8d2-05e083080926 req-d1e0a360-ecee-4cca-9fac-aeb5b95d0413 service nova] Acquired lock "refresh_cache-9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.619632] env[61629]: DEBUG nova.network.neutron [req-65b28b19-d781-489d-b8d2-05e083080926 req-d1e0a360-ecee-4cca-9fac-aeb5b95d0413 service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Refreshing network info cache for port de2e75fd-8c5a-4959-ac73-80b64539caa3 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1050.620791] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-329f430b-1db2-46c0-b88b-4716434f45c7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.638429] env[61629]: DEBUG oslo_vmware.api [None req-e8f4df3b-99a8-4647-a352-dd7888f31486 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Waiting for the task: (returnval){ [ 1050.638429] env[61629]: value = "task-1354615" [ 1050.638429] env[61629]: _type = "Task" [ 1050.638429] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.649607] env[61629]: DEBUG oslo_vmware.api [None req-e8f4df3b-99a8-4647-a352-dd7888f31486 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': task-1354615, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.668773] env[61629]: INFO nova.compute.manager [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Took 11.57 seconds to build instance. [ 1050.981057] env[61629]: DEBUG nova.network.neutron [req-65b28b19-d781-489d-b8d2-05e083080926 req-d1e0a360-ecee-4cca-9fac-aeb5b95d0413 service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Updated VIF entry in instance network info cache for port de2e75fd-8c5a-4959-ac73-80b64539caa3. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1050.981455] env[61629]: DEBUG nova.network.neutron [req-65b28b19-d781-489d-b8d2-05e083080926 req-d1e0a360-ecee-4cca-9fac-aeb5b95d0413 service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Updating instance_info_cache with network_info: [{"id": "de2e75fd-8c5a-4959-ac73-80b64539caa3", "address": "fa:16:3e:57:79:05", "network": {"id": "93a87623-e06d-4557-b118-c5170f70390e", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-948282834-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92d74dfdbfa74614b9950031e913799d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "423047aa-c430-4593-a76c-9982c15c81cf", "external-id": "nsx-vlan-transportzone-262", "segmentation_id": 262, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde2e75fd-8c", "ovs_interfaceid": "de2e75fd-8c5a-4959-ac73-80b64539caa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.080672] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.659s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.107058] env[61629]: INFO nova.scheduler.client.report [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Deleted allocations for instance 0d21b352-bdd0-4887-8658-cd5c448352d2 [ 1051.152865] env[61629]: DEBUG oslo_vmware.api [None req-e8f4df3b-99a8-4647-a352-dd7888f31486 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': task-1354615, 'name': ReconfigVM_Task, 'duration_secs': 0.132037} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.153861] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e8f4df3b-99a8-4647-a352-dd7888f31486 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Reconfigured VM instance to set the machine id {{(pid=61629) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1051.169950] env[61629]: DEBUG oslo_concurrency.lockutils [None req-82d6ad7a-af0e-4af3-8cb6-99078c2e2304 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "88cf532c-b13b-4c27-8637-d24bb6d73b82" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.082s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.274041] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Acquiring lock "9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.274336] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Lock "9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.274555] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Acquiring lock "9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.274744] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Lock "9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.274970] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Lock "9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.277119] env[61629]: INFO nova.compute.manager [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 
tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Terminating instance [ 1051.278969] env[61629]: DEBUG nova.compute.manager [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1051.279188] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1051.280043] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-954e9ded-bfa1-4882-8360-f3097f9ffc61 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.288879] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1051.289515] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c42df0ed-839b-4447-a606-a516c62b3c4f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.295435] env[61629]: DEBUG oslo_vmware.api [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Waiting for the task: (returnval){ [ 1051.295435] env[61629]: value = "task-1354616" [ 1051.295435] env[61629]: _type = "Task" [ 1051.295435] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.303711] env[61629]: DEBUG oslo_vmware.api [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': task-1354616, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.441815] env[61629]: DEBUG nova.compute.manager [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1051.473213] env[61629]: DEBUG nova.virt.hardware [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1051.473627] env[61629]: DEBUG nova.virt.hardware [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1051.473916] env[61629]: DEBUG nova.virt.hardware [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1051.474249] env[61629]: DEBUG nova.virt.hardware [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1051.474507] env[61629]: DEBUG nova.virt.hardware [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1051.474757] env[61629]: DEBUG nova.virt.hardware [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1051.475148] env[61629]: DEBUG nova.virt.hardware [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1051.475409] env[61629]: DEBUG nova.virt.hardware [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1051.475703] env[61629]: DEBUG nova.virt.hardware [None 
req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1051.475980] env[61629]: DEBUG nova.virt.hardware [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1051.476328] env[61629]: DEBUG nova.virt.hardware [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1051.477817] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0a7d45-ded7-45ad-865d-de9fe5f56267 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.485294] env[61629]: DEBUG oslo_concurrency.lockutils [req-65b28b19-d781-489d-b8d2-05e083080926 req-d1e0a360-ecee-4cca-9fac-aeb5b95d0413 service nova] Releasing lock "refresh_cache-9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1051.487858] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb23e45e-41e3-400e-a392-ed3b9e9de8be {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.614592] env[61629]: DEBUG oslo_concurrency.lockutils [None req-4fd70add-9d9f-4461-8561-a98b2d7ee25b tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "0d21b352-bdd0-4887-8658-cd5c448352d2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.407s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.704592] env[61629]: DEBUG oslo_concurrency.lockutils [None req-68778e64-b022-47f4-8a5b-6a485bd4d0c3 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "88cf532c-b13b-4c27-8637-d24bb6d73b82" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.704933] env[61629]: DEBUG oslo_concurrency.lockutils [None req-68778e64-b022-47f4-8a5b-6a485bd4d0c3 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "88cf532c-b13b-4c27-8637-d24bb6d73b82" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.705144] env[61629]: DEBUG nova.compute.manager [None req-68778e64-b022-47f4-8a5b-6a485bd4d0c3 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1051.706090] env[61629]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-744eea9e-3ae2-4cda-b133-d30b4712c9c3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.714278] env[61629]: DEBUG nova.compute.manager [None req-68778e64-b022-47f4-8a5b-6a485bd4d0c3 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61629) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1051.714580] env[61629]: DEBUG nova.objects.instance [None req-68778e64-b022-47f4-8a5b-6a485bd4d0c3 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lazy-loading 'flavor' on Instance uuid 88cf532c-b13b-4c27-8637-d24bb6d73b82 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.743995] env[61629]: DEBUG nova.compute.manager [req-766b7386-21dc-4b45-8eef-fc7a1b6849c1 req-cd845639-db9f-4acd-887e-99c2f2d430d5 service nova] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Received event network-vif-plugged-4f1807c4-68f3-46c7-939e-507baf0b7c79 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1051.744285] env[61629]: DEBUG oslo_concurrency.lockutils [req-766b7386-21dc-4b45-8eef-fc7a1b6849c1 req-cd845639-db9f-4acd-887e-99c2f2d430d5 service nova] Acquiring lock "843e230d-fb7e-4375-83f4-78fe6d9de9ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.744549] env[61629]: DEBUG oslo_concurrency.lockutils [req-766b7386-21dc-4b45-8eef-fc7a1b6849c1 req-cd845639-db9f-4acd-887e-99c2f2d430d5 service nova] Lock "843e230d-fb7e-4375-83f4-78fe6d9de9ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.744827] env[61629]: DEBUG oslo_concurrency.lockutils [req-766b7386-21dc-4b45-8eef-fc7a1b6849c1 req-cd845639-db9f-4acd-887e-99c2f2d430d5 service nova] Lock "843e230d-fb7e-4375-83f4-78fe6d9de9ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.745049] env[61629]: DEBUG nova.compute.manager [req-766b7386-21dc-4b45-8eef-fc7a1b6849c1 req-cd845639-db9f-4acd-887e-99c2f2d430d5 service nova] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] No waiting events found dispatching network-vif-plugged-4f1807c4-68f3-46c7-939e-507baf0b7c79 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1051.745272] env[61629]: WARNING nova.compute.manager [req-766b7386-21dc-4b45-8eef-fc7a1b6849c1 req-cd845639-db9f-4acd-887e-99c2f2d430d5 service nova] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Received unexpected event network-vif-plugged-4f1807c4-68f3-46c7-939e-507baf0b7c79 for instance with vm_state building and task_state spawning. 
[ 1051.785216] env[61629]: DEBUG nova.network.neutron [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Successfully updated port: 4f1807c4-68f3-46c7-939e-507baf0b7c79 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1051.805802] env[61629]: DEBUG oslo_vmware.api [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': task-1354616, 'name': PowerOffVM_Task, 'duration_secs': 0.19652} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.806098] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1051.806280] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1051.806571] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c984c974-dec5-4cbc-9273-a56408dfa1fe {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.871633] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1051.871926] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1051.872138] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Deleting the datastore file [datastore1] 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1051.872411] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d59e5f39-528b-4f2c-900e-4388a86a9793 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.879289] env[61629]: DEBUG oslo_vmware.api [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Waiting for the task: (returnval){ [ 1051.879289] env[61629]: value = "task-1354618" [ 1051.879289] 
env[61629]: _type = "Task" [ 1051.879289] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.886714] env[61629]: DEBUG oslo_vmware.api [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': task-1354618, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.951387] env[61629]: DEBUG oslo_concurrency.lockutils [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "87a1383f-d66b-4bde-b153-89ac62ff8390" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.951661] env[61629]: DEBUG oslo_concurrency.lockutils [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "87a1383f-d66b-4bde-b153-89ac62ff8390" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.951913] env[61629]: DEBUG oslo_concurrency.lockutils [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "87a1383f-d66b-4bde-b153-89ac62ff8390-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.952164] env[61629]: DEBUG oslo_concurrency.lockutils [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "87a1383f-d66b-4bde-b153-89ac62ff8390-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.952356] env[61629]: DEBUG oslo_concurrency.lockutils [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "87a1383f-d66b-4bde-b153-89ac62ff8390-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.954841] env[61629]: INFO nova.compute.manager [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Terminating instance [ 1051.957086] env[61629]: DEBUG nova.compute.manager [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1051.957312] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1051.958256] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3de5f73-8ebd-4299-8000-997d2bb10af1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.966533] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1051.966799] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8326e78e-c82f-4a49-98b0-6f492b17053c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.973027] env[61629]: DEBUG oslo_vmware.api [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 1051.973027] env[61629]: value = "task-1354619" [ 1051.973027] env[61629]: _type = "Task" [ 1051.973027] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.981320] env[61629]: DEBUG oslo_vmware.api [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354619, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.221023] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-68778e64-b022-47f4-8a5b-6a485bd4d0c3 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1052.221023] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2b72e1e8-8119-4fe1-a83d-096b559acb84 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.227715] env[61629]: DEBUG oslo_vmware.api [None req-68778e64-b022-47f4-8a5b-6a485bd4d0c3 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1052.227715] env[61629]: value = "task-1354620" [ 1052.227715] env[61629]: _type = "Task" [ 1052.227715] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.235882] env[61629]: DEBUG oslo_vmware.api [None req-68778e64-b022-47f4-8a5b-6a485bd4d0c3 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354620, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.288234] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "refresh_cache-843e230d-fb7e-4375-83f4-78fe6d9de9ff" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1052.288385] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired lock "refresh_cache-843e230d-fb7e-4375-83f4-78fe6d9de9ff" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.288533] env[61629]: DEBUG nova.network.neutron [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1052.389364] env[61629]: DEBUG oslo_vmware.api [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Task: {'id': task-1354618, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144677} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.389740] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1052.389869] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1052.390077] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1052.390267] env[61629]: INFO nova.compute.manager [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1052.390570] env[61629]: DEBUG oslo.service.loopingcall [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1052.390784] env[61629]: DEBUG nova.compute.manager [-] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1052.390878] env[61629]: DEBUG nova.network.neutron [-] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1052.485176] env[61629]: DEBUG oslo_vmware.api [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354619, 'name': PowerOffVM_Task, 'duration_secs': 0.188208} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.485465] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1052.485699] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1052.485974] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e9569dce-08c3-48c1-86ba-177cfbbfd437 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.551023] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1052.551278] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1052.551469] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Deleting the datastore file [datastore2] 87a1383f-d66b-4bde-b153-89ac62ff8390 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1052.551884] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-89818771-b031-4abb-87b9-43dae3853117 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.558924] env[61629]: DEBUG oslo_vmware.api [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 
tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for the task: (returnval){ [ 1052.558924] env[61629]: value = "task-1354622" [ 1052.558924] env[61629]: _type = "Task" [ 1052.558924] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.566658] env[61629]: DEBUG oslo_vmware.api [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354622, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.737241] env[61629]: DEBUG oslo_vmware.api [None req-68778e64-b022-47f4-8a5b-6a485bd4d0c3 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354620, 'name': PowerOffVM_Task, 'duration_secs': 0.202944} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.737572] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-68778e64-b022-47f4-8a5b-6a485bd4d0c3 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1052.737800] env[61629]: DEBUG nova.compute.manager [None req-68778e64-b022-47f4-8a5b-6a485bd4d0c3 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1052.738588] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9683368b-83fc-4f0d-862e-6c97686d9a41 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.840758] env[61629]: DEBUG nova.network.neutron [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1053.000418] env[61629]: DEBUG nova.network.neutron [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Updating instance_info_cache with network_info: [{"id": "4f1807c4-68f3-46c7-939e-507baf0b7c79", "address": "fa:16:3e:ca:02:a7", "network": {"id": "03610486-2741-491e-a62d-a51579315e5a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1394073503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38efdd2cc07f45a49fb06d590aafb96b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f1807c4-68", "ovs_interfaceid": "4f1807c4-68f3-46c7-939e-507baf0b7c79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.071608] env[61629]: DEBUG oslo_vmware.api [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Task: {'id': task-1354622, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152562} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.071608] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1053.071608] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1053.071608] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1053.071608] env[61629]: INFO nova.compute.manager [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 1053.071608] env[61629]: DEBUG oslo.service.loopingcall [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1053.071608] env[61629]: DEBUG nova.compute.manager [-] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1053.071608] env[61629]: DEBUG nova.network.neutron [-] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1053.250586] env[61629]: DEBUG oslo_concurrency.lockutils [None req-68778e64-b022-47f4-8a5b-6a485bd4d0c3 tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "88cf532c-b13b-4c27-8637-d24bb6d73b82" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.546s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.399443] env[61629]: DEBUG nova.network.neutron [-] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.506395] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Releasing lock "refresh_cache-843e230d-fb7e-4375-83f4-78fe6d9de9ff" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.506732] env[61629]: DEBUG nova.compute.manager [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Instance network_info: |[{"id": "4f1807c4-68f3-46c7-939e-507baf0b7c79", "address": "fa:16:3e:ca:02:a7", "network": {"id": "03610486-2741-491e-a62d-a51579315e5a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1394073503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38efdd2cc07f45a49fb06d590aafb96b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f1807c4-68", "ovs_interfaceid": "4f1807c4-68f3-46c7-939e-507baf0b7c79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1053.507196] env[61629]: DEBUG 
nova.virt.vmwareapi.vmops [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:02:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd88b750a-0e7d-4f16-8bd5-8e6d5743b720', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4f1807c4-68f3-46c7-939e-507baf0b7c79', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1053.514700] env[61629]: DEBUG oslo.service.loopingcall [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1053.514923] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1053.515168] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e43a8e5-ae79-43ad-a973-97e00d0ea59c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.537045] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1053.537045] env[61629]: value = "task-1354623" [ 1053.537045] env[61629]: _type = "Task" [ 1053.537045] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.545213] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354623, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.778021] env[61629]: DEBUG nova.compute.manager [req-80f53b68-748b-401f-9191-67d8814361d9 req-655fd4db-57a7-41e3-8562-5d16b396788a service nova] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Received event network-changed-4f1807c4-68f3-46c7-939e-507baf0b7c79 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1053.778700] env[61629]: DEBUG nova.compute.manager [req-80f53b68-748b-401f-9191-67d8814361d9 req-655fd4db-57a7-41e3-8562-5d16b396788a service nova] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Refreshing instance network info cache due to event network-changed-4f1807c4-68f3-46c7-939e-507baf0b7c79. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1053.779070] env[61629]: DEBUG oslo_concurrency.lockutils [req-80f53b68-748b-401f-9191-67d8814361d9 req-655fd4db-57a7-41e3-8562-5d16b396788a service nova] Acquiring lock "refresh_cache-843e230d-fb7e-4375-83f4-78fe6d9de9ff" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1053.779337] env[61629]: DEBUG oslo_concurrency.lockutils [req-80f53b68-748b-401f-9191-67d8814361d9 req-655fd4db-57a7-41e3-8562-5d16b396788a service nova] Acquired lock "refresh_cache-843e230d-fb7e-4375-83f4-78fe6d9de9ff" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.779564] env[61629]: DEBUG nova.network.neutron [req-80f53b68-748b-401f-9191-67d8814361d9 req-655fd4db-57a7-41e3-8562-5d16b396788a service nova] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Refreshing network info cache for port 4f1807c4-68f3-46c7-939e-507baf0b7c79 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1053.809103] env[61629]: DEBUG nova.network.neutron [-] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.902518] env[61629]: INFO nova.compute.manager [-] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Took 1.51 seconds to deallocate network for instance. [ 1054.049400] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354623, 'name': CreateVM_Task, 'duration_secs': 0.354642} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.049400] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1054.049400] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1054.049400] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.049668] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1054.049932] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86b6a8da-fb97-45be-b149-969d49b35c69 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.054711] env[61629]: DEBUG oslo_vmware.api [None 
req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1054.054711] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]523e6549-ee18-3ddb-81e5-5908e2775817" [ 1054.054711] env[61629]: _type = "Task" [ 1054.054711] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.063886] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]523e6549-ee18-3ddb-81e5-5908e2775817, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.311657] env[61629]: INFO nova.compute.manager [-] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Took 1.24 seconds to deallocate network for instance. [ 1054.408605] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.408877] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1054.409115] env[61629]: DEBUG nova.objects.instance [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Lazy-loading 'resources' on Instance uuid 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1054.472162] env[61629]: DEBUG nova.network.neutron [req-80f53b68-748b-401f-9191-67d8814361d9 req-655fd4db-57a7-41e3-8562-5d16b396788a service nova] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Updated VIF entry in instance network info cache for port 4f1807c4-68f3-46c7-939e-507baf0b7c79. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1054.472533] env[61629]: DEBUG nova.network.neutron [req-80f53b68-748b-401f-9191-67d8814361d9 req-655fd4db-57a7-41e3-8562-5d16b396788a service nova] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Updating instance_info_cache with network_info: [{"id": "4f1807c4-68f3-46c7-939e-507baf0b7c79", "address": "fa:16:3e:ca:02:a7", "network": {"id": "03610486-2741-491e-a62d-a51579315e5a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1394073503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38efdd2cc07f45a49fb06d590aafb96b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f1807c4-68", "ovs_interfaceid": "4f1807c4-68f3-46c7-939e-507baf0b7c79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1054.564789] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]523e6549-ee18-3ddb-81e5-5908e2775817, 'name': SearchDatastore_Task, 'duration_secs': 0.008903} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.565116] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1054.565360] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1054.565610] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1054.565761] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.565941] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1054.567120] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1297c3f8-3811-4828-8ff7-a3f15451650e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.574042] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1054.574226] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1054.574910] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-806a2060-b245-48d9-86b6-0df76020e38f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.580350] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1054.580350] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52d3f670-5f07-f3da-0e29-f69db817cf37" [ 1054.580350] env[61629]: _type = "Task" [ 1054.580350] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.587313] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52d3f670-5f07-f3da-0e29-f69db817cf37, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.632480] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "88cf532c-b13b-4c27-8637-d24bb6d73b82" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.632761] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "88cf532c-b13b-4c27-8637-d24bb6d73b82" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1054.632974] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "88cf532c-b13b-4c27-8637-d24bb6d73b82-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.633173] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "88cf532c-b13b-4c27-8637-d24bb6d73b82-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1054.633353] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "88cf532c-b13b-4c27-8637-d24bb6d73b82-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.635490] env[61629]: INFO nova.compute.manager [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Terminating instance [ 1054.637227] env[61629]: DEBUG nova.compute.manager [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Start destroying the instance on the hypervisor. {{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1054.637429] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1054.638257] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e67d1eee-08c2-44c7-b744-139c23727541 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.646150] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1054.646401] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-16bbd95b-6aac-4947-9bbf-3136b7abac13 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.728887] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1054.729135] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Deleting contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1054.729419] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Deleting the datastore file [datastore1] 88cf532c-b13b-4c27-8637-d24bb6d73b82 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1054.729689] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-29c9b9e9-1c47-43b6-9a6c-b4387defc332 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.737042] env[61629]: DEBUG oslo_vmware.api [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for the task: (returnval){ [ 1054.737042] env[61629]: value = "task-1354625" [ 1054.737042] 
env[61629]: _type = "Task" [ 1054.737042] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.744025] env[61629]: DEBUG oslo_vmware.api [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354625, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.818397] env[61629]: DEBUG oslo_concurrency.lockutils [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.972890] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43abbfcc-7d57-4f02-8922-9ca8963b2de7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.975637] env[61629]: DEBUG oslo_concurrency.lockutils [req-80f53b68-748b-401f-9191-67d8814361d9 req-655fd4db-57a7-41e3-8562-5d16b396788a service nova] Releasing lock "refresh_cache-843e230d-fb7e-4375-83f4-78fe6d9de9ff" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1054.975879] env[61629]: DEBUG nova.compute.manager [req-80f53b68-748b-401f-9191-67d8814361d9 req-655fd4db-57a7-41e3-8562-5d16b396788a service nova] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Received event network-vif-deleted-de2e75fd-8c5a-4959-ac73-80b64539caa3 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1054.976087] env[61629]: DEBUG nova.compute.manager [req-80f53b68-748b-401f-9191-67d8814361d9 req-655fd4db-57a7-41e3-8562-5d16b396788a service nova] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Received event network-vif-deleted-bff06c9b-54d2-4109-b2de-70fbab2c58d4 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1054.976266] env[61629]: INFO nova.compute.manager [req-80f53b68-748b-401f-9191-67d8814361d9 req-655fd4db-57a7-41e3-8562-5d16b396788a service nova] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Neutron deleted interface bff06c9b-54d2-4109-b2de-70fbab2c58d4; detaching it from the instance and deleting it from the info cache [ 1054.976436] env[61629]: DEBUG nova.network.neutron [req-80f53b68-748b-401f-9191-67d8814361d9 req-655fd4db-57a7-41e3-8562-5d16b396788a service nova] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1054.982666] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e9ead5-71a8-41e1-a42c-8820e9dec087 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.013669] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad586f18-92a5-40d4-acfc-2f36da3a4077 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.020427] env[61629]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49daa8a8-c955-480a-84d5-759f826e5a3e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.033815] env[61629]: DEBUG nova.compute.provider_tree [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1055.088836] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52d3f670-5f07-f3da-0e29-f69db817cf37, 'name': SearchDatastore_Task, 'duration_secs': 0.007576} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.089619] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9b488ea-cd39-4ce7-9967-dfbd56f43f0c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.094347] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1055.094347] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52dd9d50-93d1-398b-f5fa-c3ad173435d3" [ 1055.094347] env[61629]: _type = "Task" [ 1055.094347] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.101437] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52dd9d50-93d1-398b-f5fa-c3ad173435d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.245574] env[61629]: DEBUG oslo_vmware.api [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Task: {'id': task-1354625, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.125094} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.245836] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1055.246038] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Deleted contents of the VM from datastore datastore1 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1055.246226] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1055.246405] env[61629]: INFO nova.compute.manager [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1055.246709] env[61629]: DEBUG oslo.service.loopingcall [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1055.246837] env[61629]: DEBUG nova.compute.manager [-] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1055.246953] env[61629]: DEBUG nova.network.neutron [-] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1055.479828] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-78cd4ea2-4ff5-4136-8724-a23ae6c035b3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.490512] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74268504-db7e-4f6a-bc4f-9592789e2570 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.513968] env[61629]: DEBUG nova.compute.manager [req-80f53b68-748b-401f-9191-67d8814361d9 req-655fd4db-57a7-41e3-8562-5d16b396788a service nova] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Detach interface failed, port_id=bff06c9b-54d2-4109-b2de-70fbab2c58d4, reason: Instance 87a1383f-d66b-4bde-b153-89ac62ff8390 could not be found. 
{{(pid=61629) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1055.536411] env[61629]: DEBUG nova.scheduler.client.report [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1055.604510] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52dd9d50-93d1-398b-f5fa-c3ad173435d3, 'name': SearchDatastore_Task, 'duration_secs': 0.008132} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.604761] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1055.605035] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 843e230d-fb7e-4375-83f4-78fe6d9de9ff/843e230d-fb7e-4375-83f4-78fe6d9de9ff.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1055.605298] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4a3189ff-4437-4e83-bf85-ae3ebfc7a48d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.611276] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1055.611276] env[61629]: value = "task-1354626" [ 1055.611276] env[61629]: _type = "Task" [ 1055.611276] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.618782] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354626, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.807882] env[61629]: DEBUG nova.compute.manager [req-cb1f4ec5-804e-4b84-ac2f-fff6e1f242be req-91b92c9e-fd92-4c97-b2c3-6d4527078eb1 service nova] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Received event network-vif-deleted-c6cc50f1-033b-4f53-924c-25fa657edba9 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1055.808133] env[61629]: INFO nova.compute.manager [req-cb1f4ec5-804e-4b84-ac2f-fff6e1f242be req-91b92c9e-fd92-4c97-b2c3-6d4527078eb1 service nova] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Neutron deleted interface c6cc50f1-033b-4f53-924c-25fa657edba9; detaching it from the instance and deleting it from the info cache [ 1055.808315] env[61629]: DEBUG nova.network.neutron [req-cb1f4ec5-804e-4b84-ac2f-fff6e1f242be req-91b92c9e-fd92-4c97-b2c3-6d4527078eb1 service nova] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.988500] env[61629]: DEBUG nova.network.neutron [-] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.041188] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.632s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.043715] env[61629]: DEBUG oslo_concurrency.lockutils [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.225s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.044009] env[61629]: DEBUG nova.objects.instance [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lazy-loading 'resources' on Instance uuid 87a1383f-d66b-4bde-b153-89ac62ff8390 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1056.063288] env[61629]: INFO nova.scheduler.client.report [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Deleted allocations for instance 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4 [ 1056.121600] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354626, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.445777} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.121836] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 843e230d-fb7e-4375-83f4-78fe6d9de9ff/843e230d-fb7e-4375-83f4-78fe6d9de9ff.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1056.122067] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1056.122343] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2ba36c12-b237-40bd-b959-b4652917a65e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.129357] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1056.129357] env[61629]: value = "task-1354627" [ 1056.129357] env[61629]: _type = "Task" [ 1056.129357] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.137166] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354627, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.311081] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0b883474-4474-4e7d-ba34-824d4a9f014b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.320295] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a93c73a-404d-4030-bbcf-9a82921ef468 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.341996] env[61629]: DEBUG nova.compute.manager [req-cb1f4ec5-804e-4b84-ac2f-fff6e1f242be req-91b92c9e-fd92-4c97-b2c3-6d4527078eb1 service nova] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Detach interface failed, port_id=c6cc50f1-033b-4f53-924c-25fa657edba9, reason: Instance 88cf532c-b13b-4c27-8637-d24bb6d73b82 could not be found. {{(pid=61629) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1056.491416] env[61629]: INFO nova.compute.manager [-] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Took 1.24 seconds to deallocate network for instance. 
[ 1056.570627] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e48ffd71-2f3c-463c-b988-22c870627289 tempest-AttachInterfacesUnderV243Test-1812694388 tempest-AttachInterfacesUnderV243Test-1812694388-project-member] Lock "9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.296s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.600624] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37611b8-e59c-402b-bfe5-cc7185f6b2c3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.608041] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f313353-dc92-4855-9627-0ec0b41a0a7d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.640925] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869dfe86-a3c6-4d99-a03f-8247e2c84bca {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.648156] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354627, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.056596} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.650070] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1056.650825] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1064609e-e1ca-46d9-8a98-dafa81ab128f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.653897] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ad9f829-789b-420e-843f-e25c2711c28f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.667150] env[61629]: DEBUG nova.compute.provider_tree [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1056.685368] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 843e230d-fb7e-4375-83f4-78fe6d9de9ff/843e230d-fb7e-4375-83f4-78fe6d9de9ff.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1056.686320] 
env[61629]: DEBUG nova.scheduler.client.report [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1056.689210] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7945f05c-abb8-41dc-96d8-f6ebac7915a3 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.709907] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1056.709907] env[61629]: value = "task-1354628" [ 1056.709907] env[61629]: _type = "Task" [ 1056.709907] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.717478] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354628, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.001726] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.204429] env[61629]: DEBUG oslo_concurrency.lockutils [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.161s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.207461] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.205s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.207461] env[61629]: DEBUG nova.objects.instance [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lazy-loading 'resources' on Instance uuid 88cf532c-b13b-4c27-8637-d24bb6d73b82 {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1057.219298] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 
tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354628, 'name': ReconfigVM_Task, 'duration_secs': 0.278368} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.220295] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 843e230d-fb7e-4375-83f4-78fe6d9de9ff/843e230d-fb7e-4375-83f4-78fe6d9de9ff.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1057.221023] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ab25a2f5-f612-47fb-96f4-851cbb703d1c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.228264] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1057.228264] env[61629]: value = "task-1354629" [ 1057.228264] env[61629]: _type = "Task" [ 1057.228264] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.232186] env[61629]: INFO nova.scheduler.client.report [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Deleted allocations for instance 87a1383f-d66b-4bde-b153-89ac62ff8390 [ 1057.239338] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354629, 'name': Rename_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.742356] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354629, 'name': Rename_Task, 'duration_secs': 0.151135} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.742832] env[61629]: DEBUG oslo_concurrency.lockutils [None req-72ca1797-5aaf-4f6d-bbdc-e7c8477f5db4 tempest-ServerRescueNegativeTestJSON-1305254252 tempest-ServerRescueNegativeTestJSON-1305254252-project-member] Lock "87a1383f-d66b-4bde-b153-89ac62ff8390" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.791s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.745683] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1057.746111] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-94907d78-d32f-4e0d-99ea-b74c25e981d6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.753918] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1057.753918] env[61629]: value = "task-1354630" [ 1057.753918] env[61629]: _type = "Task" [ 1057.753918] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.758589] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e30e978-29b5-4e4f-bf9b-e36a07532b82 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.770554] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354630, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.771659] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a16e5f0e-4f3a-4ee1-9c84-cda3d1015439 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.802673] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a183ff-1bc5-4223-bf21-1e42cf70c25b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.810200] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-def6a18f-4b62-4b98-9e3b-e9427b270a10 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.823829] env[61629]: DEBUG nova.compute.provider_tree [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1058.262746] env[61629]: DEBUG oslo_vmware.api [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354630, 'name': PowerOnVM_Task, 'duration_secs': 0.467848} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.263035] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1058.263321] env[61629]: INFO nova.compute.manager [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Took 6.82 seconds to spawn the instance on the hypervisor. 
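The "Waiting for the task ... progress is N% ... completed successfully" entries above and below are produced by oslo.vmware's task polling (wait_for_task / _poll_task in oslo_vmware/api.py). The snippet that follows is only a minimal, self-contained sketch of that polling shape, not the oslo.vmware implementation; poll_task_info and its returned object are hypothetical stand-ins for the vCenter TaskInfo lookup.

    import time

    POLL_INTERVAL = 0.5  # assumed fixed poll interval; oslo.vmware drives this with a looping call

    def wait_for_task(poll_task_info, task_ref):
        """Poll a vCenter task reference until it reaches a terminal state.

        poll_task_info is a callable returning an object with .state, .progress
        and .error attributes (a stand-in for the TaskInfo managed object).
        """
        while True:
            info = poll_task_info(task_ref)
            if info.state == 'success':
                return info                      # logged as "completed successfully"
            if info.state == 'error':
                raise RuntimeError(info.error)   # surfaced to the caller as a task failure
            # queued/running: report progress and poll again, mirroring the
            # "Task: {...} progress is N%" DEBUG entries in this log.
            print("Task %s progress is %s%%" % (task_ref, info.progress or 0))
            time.sleep(POLL_INTERVAL)
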
[ 1058.263433] env[61629]: DEBUG nova.compute.manager [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1058.264300] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3bc186f-cd55-4ceb-91ed-3f89e2d748e9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.326964] env[61629]: DEBUG nova.scheduler.client.report [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1058.788253] env[61629]: INFO nova.compute.manager [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Took 12.94 seconds to build instance. [ 1058.832127] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.625s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1058.859019] env[61629]: INFO nova.scheduler.client.report [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Deleted allocations for instance 88cf532c-b13b-4c27-8637-d24bb6d73b82 [ 1059.293532] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f5350545-1e93-4fff-bcbc-8550dfd8b836 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "843e230d-fb7e-4375-83f4-78fe6d9de9ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.451s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.366842] env[61629]: DEBUG oslo_concurrency.lockutils [None req-7fd66842-2a61-43f0-889d-4320ba68f2dd tempest-ServersTestJSON-1460186850 tempest-ServersTestJSON-1460186850-project-member] Lock "88cf532c-b13b-4c27-8637-d24bb6d73b82" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.734s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1060.215061] env[61629]: DEBUG nova.compute.manager [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Stashing vm_state: active {{(pid=61629) _prep_resize 
/opt/stack/nova/nova/compute/manager.py:5624}} [ 1060.745463] env[61629]: DEBUG oslo_concurrency.lockutils [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.746329] env[61629]: DEBUG oslo_concurrency.lockutils [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.251117] env[61629]: INFO nova.compute.claims [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1061.764252] env[61629]: INFO nova.compute.resource_tracker [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Updating resource usage from migration c8908495-b24e-45fe-bf2f-930f99e91822 [ 1061.828406] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6e0b659-c40d-4ed0-bbba-eb31a7c1b0c0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.837130] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357c7fe7-baaf-4a8c-957d-c54adc341c58 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.870704] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761bbf18-883f-4b2d-b404-5fd5514bc84e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.880238] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff05878c-cdac-40c5-9bc4-1d8edb0d377c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.892924] env[61629]: DEBUG nova.compute.provider_tree [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1062.396120] env[61629]: DEBUG nova.scheduler.client.report [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1062.493343] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Acquiring lock "03327851-5bf7-47f0-b0b2-ce4b763225cb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1062.493633] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Lock "03327851-5bf7-47f0-b0b2-ce4b763225cb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.903258] env[61629]: DEBUG oslo_concurrency.lockutils [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.157s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.903500] env[61629]: INFO nova.compute.manager [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Migrating [ 1062.995438] env[61629]: DEBUG nova.compute.manager [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Starting instance... 
{{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1063.418302] env[61629]: DEBUG oslo_concurrency.lockutils [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "refresh_cache-843e230d-fb7e-4375-83f4-78fe6d9de9ff" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1063.418644] env[61629]: DEBUG oslo_concurrency.lockutils [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired lock "refresh_cache-843e230d-fb7e-4375-83f4-78fe6d9de9ff" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.418685] env[61629]: DEBUG nova.network.neutron [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1063.518170] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.518430] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.519965] env[61629]: INFO nova.compute.claims [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1064.112770] env[61629]: DEBUG nova.network.neutron [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Updating instance_info_cache with network_info: [{"id": "4f1807c4-68f3-46c7-939e-507baf0b7c79", "address": "fa:16:3e:ca:02:a7", "network": {"id": "03610486-2741-491e-a62d-a51579315e5a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1394073503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38efdd2cc07f45a49fb06d590aafb96b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 
715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f1807c4-68", "ovs_interfaceid": "4f1807c4-68f3-46c7-939e-507baf0b7c79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1064.574333] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dbdf764-0e57-4da6-bfbf-c0e46dfe5e8f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.581895] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3647d3d8-5a63-4117-80db-d1555a6082ec {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.610662] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e69dbe4-4f7e-4183-ab33-30834e872678 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.616844] env[61629]: DEBUG oslo_concurrency.lockutils [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Releasing lock "refresh_cache-843e230d-fb7e-4375-83f4-78fe6d9de9ff" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1064.619113] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-068d7001-6570-4268-a145-14f86104a562 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.632570] env[61629]: DEBUG nova.compute.provider_tree [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1065.135760] env[61629]: DEBUG nova.scheduler.client.report [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1065.640052] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.121s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1066.135756] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-91c353c1-fb21-4ec3-a52a-5dce2f2f18e6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.142044] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Acquiring lock "384b20bb-a3e1-4bb9-8502-b4c7664d92e4" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.142302] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Lock "384b20bb-a3e1-4bb9-8502-b4c7664d92e4" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1066.155846] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Updating instance '843e230d-fb7e-4375-83f4-78fe6d9de9ff' progress to 0 {{(pid=61629) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1066.660308] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Lock "384b20bb-a3e1-4bb9-8502-b4c7664d92e4" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.518s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1066.660883] env[61629]: DEBUG nova.compute.manager [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Start building networks asynchronously for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1066.664626] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1066.665108] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5c06100-1b12-4c26-bc4a-085e138f213f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.672214] env[61629]: DEBUG oslo_vmware.api [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1066.672214] env[61629]: value = "task-1354631" [ 1066.672214] env[61629]: _type = "Task" [ 1066.672214] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.680021] env[61629]: DEBUG oslo_vmware.api [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354631, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.170023] env[61629]: DEBUG nova.compute.utils [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1067.174020] env[61629]: DEBUG nova.compute.manager [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Allocating IP information in the background. {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1067.174124] env[61629]: DEBUG nova.network.neutron [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1067.199284] env[61629]: DEBUG oslo_vmware.api [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354631, 'name': PowerOffVM_Task, 'duration_secs': 0.199592} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.200252] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1067.200578] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Updating instance '843e230d-fb7e-4375-83f4-78fe6d9de9ff' progress to 17 {{(pid=61629) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1067.241958] env[61629]: DEBUG nova.policy [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b7e85d7fb01d401fa9cbecb766408e5d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4b4107fc57f54319973ab87715e93e61', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 1067.488414] env[61629]: DEBUG nova.network.neutron [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Successfully created port: c58b1d68-4864-433f-abc5-85f78006fa3f {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1067.677327] env[61629]: DEBUG nova.compute.manager [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Start building block device mappings for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1067.712311] env[61629]: DEBUG nova.virt.hardware [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:56Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1067.712558] env[61629]: DEBUG nova.virt.hardware [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1067.712722] env[61629]: DEBUG nova.virt.hardware [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1067.712911] env[61629]: DEBUG nova.virt.hardware [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1067.713076] env[61629]: DEBUG nova.virt.hardware [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1067.713232] env[61629]: DEBUG nova.virt.hardware [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1067.713442] env[61629]: DEBUG nova.virt.hardware [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1067.713611] env[61629]: DEBUG nova.virt.hardware [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1067.713784] env[61629]: DEBUG nova.virt.hardware [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Got 1 possible topologies {{(pid=61629) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1067.713955] env[61629]: DEBUG nova.virt.hardware [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1067.714152] env[61629]: DEBUG nova.virt.hardware [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1067.719614] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f3db369-15a3-4e8b-a08b-f22760c1834f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.736330] env[61629]: DEBUG oslo_vmware.api [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1067.736330] env[61629]: value = "task-1354632" [ 1067.736330] env[61629]: _type = "Task" [ 1067.736330] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.744531] env[61629]: DEBUG oslo_vmware.api [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354632, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.245980] env[61629]: DEBUG oslo_vmware.api [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354632, 'name': ReconfigVM_Task, 'duration_secs': 0.134967} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.246336] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Updating instance '843e230d-fb7e-4375-83f4-78fe6d9de9ff' progress to 33 {{(pid=61629) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1068.687653] env[61629]: DEBUG nova.compute.manager [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1068.714842] env[61629]: DEBUG nova.virt.hardware [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1068.715218] env[61629]: DEBUG nova.virt.hardware [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1068.715322] env[61629]: DEBUG nova.virt.hardware [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1068.715490] env[61629]: DEBUG nova.virt.hardware [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1068.715642] env[61629]: DEBUG nova.virt.hardware [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1068.715796] env[61629]: DEBUG nova.virt.hardware [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1068.716010] env[61629]: DEBUG nova.virt.hardware [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1068.716328] env[61629]: DEBUG nova.virt.hardware [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1068.716388] env[61629]: DEBUG nova.virt.hardware [None 
req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1068.716557] env[61629]: DEBUG nova.virt.hardware [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1068.716735] env[61629]: DEBUG nova.virt.hardware [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1068.717613] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36544f85-1e90-41ab-8a2d-b5c3cfe8f5dd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.727070] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34fe81ee-cb8d-4673-aafc-394883ecfe38 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.752622] env[61629]: DEBUG nova.virt.hardware [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1068.752893] env[61629]: DEBUG nova.virt.hardware [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1068.753113] env[61629]: DEBUG nova.virt.hardware [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1068.753323] env[61629]: DEBUG nova.virt.hardware [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1068.753478] env[61629]: DEBUG nova.virt.hardware [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Image pref 0:0:0 {{(pid=61629) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1068.753628] env[61629]: DEBUG nova.virt.hardware [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1068.753832] env[61629]: DEBUG nova.virt.hardware [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1068.754272] env[61629]: DEBUG nova.virt.hardware [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1068.754509] env[61629]: DEBUG nova.virt.hardware [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1068.754694] env[61629]: DEBUG nova.virt.hardware [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1068.754879] env[61629]: DEBUG nova.virt.hardware [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1068.760258] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Reconfiguring VM instance instance-00000068 to detach disk 2000 {{(pid=61629) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1068.760536] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0638b7c-7626-471b-8cbf-7e12e6df493e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.778500] env[61629]: DEBUG oslo_vmware.api [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1068.778500] env[61629]: value = "task-1354633" [ 1068.778500] env[61629]: _type = "Task" [ 1068.778500] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.786590] env[61629]: DEBUG oslo_vmware.api [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354633, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.851289] env[61629]: DEBUG nova.compute.manager [req-f049bb7b-ee51-4571-ba22-0c7642c3e9cc req-0d125d19-7a4c-40e1-b1b3-7ffbcc13b485 service nova] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Received event network-vif-plugged-c58b1d68-4864-433f-abc5-85f78006fa3f {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1068.851289] env[61629]: DEBUG oslo_concurrency.lockutils [req-f049bb7b-ee51-4571-ba22-0c7642c3e9cc req-0d125d19-7a4c-40e1-b1b3-7ffbcc13b485 service nova] Acquiring lock "03327851-5bf7-47f0-b0b2-ce4b763225cb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1068.851289] env[61629]: DEBUG oslo_concurrency.lockutils [req-f049bb7b-ee51-4571-ba22-0c7642c3e9cc req-0d125d19-7a4c-40e1-b1b3-7ffbcc13b485 service nova] Lock "03327851-5bf7-47f0-b0b2-ce4b763225cb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1068.851289] env[61629]: DEBUG oslo_concurrency.lockutils [req-f049bb7b-ee51-4571-ba22-0c7642c3e9cc req-0d125d19-7a4c-40e1-b1b3-7ffbcc13b485 service nova] Lock "03327851-5bf7-47f0-b0b2-ce4b763225cb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1068.851289] env[61629]: DEBUG nova.compute.manager [req-f049bb7b-ee51-4571-ba22-0c7642c3e9cc req-0d125d19-7a4c-40e1-b1b3-7ffbcc13b485 service nova] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] No waiting events found dispatching network-vif-plugged-c58b1d68-4864-433f-abc5-85f78006fa3f {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1068.851289] env[61629]: WARNING nova.compute.manager [req-f049bb7b-ee51-4571-ba22-0c7642c3e9cc req-0d125d19-7a4c-40e1-b1b3-7ffbcc13b485 service nova] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Received unexpected event network-vif-plugged-c58b1d68-4864-433f-abc5-85f78006fa3f for instance with vm_state building and task_state spawning. [ 1068.936274] env[61629]: DEBUG nova.network.neutron [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Successfully updated port: c58b1d68-4864-433f-abc5-85f78006fa3f {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1069.288801] env[61629]: DEBUG oslo_vmware.api [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354633, 'name': ReconfigVM_Task, 'duration_secs': 0.15295} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.289068] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Reconfigured VM instance instance-00000068 to detach disk 2000 {{(pid=61629) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1069.289829] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8546c04a-2c94-40cd-a017-4d6f6c269e6c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.311886] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 843e230d-fb7e-4375-83f4-78fe6d9de9ff/843e230d-fb7e-4375-83f4-78fe6d9de9ff.vmdk or device None with type thin {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1069.312171] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b96e89a1-8499-4335-b163-089b893800dd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.330057] env[61629]: DEBUG oslo_vmware.api [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1069.330057] env[61629]: value = "task-1354634" [ 1069.330057] env[61629]: _type = "Task" [ 1069.330057] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.337668] env[61629]: DEBUG oslo_vmware.api [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354634, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.439299] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Acquiring lock "refresh_cache-03327851-5bf7-47f0-b0b2-ce4b763225cb" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1069.439477] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Acquired lock "refresh_cache-03327851-5bf7-47f0-b0b2-ce4b763225cb" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.439670] env[61629]: DEBUG nova.network.neutron [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1069.839644] env[61629]: DEBUG oslo_vmware.api [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354634, 'name': ReconfigVM_Task, 'duration_secs': 0.240647} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.839993] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 843e230d-fb7e-4375-83f4-78fe6d9de9ff/843e230d-fb7e-4375-83f4-78fe6d9de9ff.vmdk or device None with type thin {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1069.840230] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Updating instance '843e230d-fb7e-4375-83f4-78fe6d9de9ff' progress to 50 {{(pid=61629) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1069.980933] env[61629]: DEBUG nova.network.neutron [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1070.105853] env[61629]: DEBUG nova.network.neutron [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Updating instance_info_cache with network_info: [{"id": "c58b1d68-4864-433f-abc5-85f78006fa3f", "address": "fa:16:3e:4f:ac:a7", "network": {"id": "ec443f22-441f-44a3-bc81-9b1e73b9b917", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1648806816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b4107fc57f54319973ab87715e93e61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc58b1d68-48", "ovs_interfaceid": "c58b1d68-4864-433f-abc5-85f78006fa3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.347417] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d20ead4-c7bb-415d-b910-2a3d0172ec7d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.366907] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-993d8220-c2a6-48d5-8365-021f3a3fa68b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.384371] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Updating instance '843e230d-fb7e-4375-83f4-78fe6d9de9ff' progress to 67 {{(pid=61629) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1070.608702] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Releasing lock "refresh_cache-03327851-5bf7-47f0-b0b2-ce4b763225cb" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1070.609038] env[61629]: DEBUG nova.compute.manager [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Instance network_info: |[{"id": "c58b1d68-4864-433f-abc5-85f78006fa3f", "address": "fa:16:3e:4f:ac:a7", "network": {"id": "ec443f22-441f-44a3-bc81-9b1e73b9b917", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1648806816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b4107fc57f54319973ab87715e93e61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc58b1d68-48", "ovs_interfaceid": "c58b1d68-4864-433f-abc5-85f78006fa3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1070.609539] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:ac:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ead20342-9afa-435e-a22b-b4a903457712', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c58b1d68-4864-433f-abc5-85f78006fa3f', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1070.617062] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Creating folder: Project (4b4107fc57f54319973ab87715e93e61). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1070.617320] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5db89c25-0b68-4114-9449-be8c64ed9b03 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.629684] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Created folder: Project (4b4107fc57f54319973ab87715e93e61) in parent group-v288443. [ 1070.629867] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Creating folder: Instances. Parent ref: group-v288586. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1070.630119] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-705215bc-7d55-454e-a71f-09671479c234 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.638817] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Created folder: Instances in parent group-v288586. 
[ 1070.639053] env[61629]: DEBUG oslo.service.loopingcall [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1070.639245] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1070.639465] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d50c0914-0d12-467a-886c-d0f53e97aa80 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.657831] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1070.657831] env[61629]: value = "task-1354637" [ 1070.657831] env[61629]: _type = "Task" [ 1070.657831] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.664805] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354637, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.884118] env[61629]: DEBUG nova.compute.manager [req-e88f62e4-8f9b-4e9b-aec2-d1cb1a556472 req-8c2a4549-5c30-4a05-bb8f-f62e3c1dc37c service nova] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Received event network-changed-c58b1d68-4864-433f-abc5-85f78006fa3f {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1070.884405] env[61629]: DEBUG nova.compute.manager [req-e88f62e4-8f9b-4e9b-aec2-d1cb1a556472 req-8c2a4549-5c30-4a05-bb8f-f62e3c1dc37c service nova] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Refreshing instance network info cache due to event network-changed-c58b1d68-4864-433f-abc5-85f78006fa3f. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1070.884582] env[61629]: DEBUG oslo_concurrency.lockutils [req-e88f62e4-8f9b-4e9b-aec2-d1cb1a556472 req-8c2a4549-5c30-4a05-bb8f-f62e3c1dc37c service nova] Acquiring lock "refresh_cache-03327851-5bf7-47f0-b0b2-ce4b763225cb" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1070.884911] env[61629]: DEBUG oslo_concurrency.lockutils [req-e88f62e4-8f9b-4e9b-aec2-d1cb1a556472 req-8c2a4549-5c30-4a05-bb8f-f62e3c1dc37c service nova] Acquired lock "refresh_cache-03327851-5bf7-47f0-b0b2-ce4b763225cb" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.884911] env[61629]: DEBUG nova.network.neutron [req-e88f62e4-8f9b-4e9b-aec2-d1cb1a556472 req-8c2a4549-5c30-4a05-bb8f-f62e3c1dc37c service nova] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Refreshing network info cache for port c58b1d68-4864-433f-abc5-85f78006fa3f {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1070.924943] env[61629]: DEBUG nova.network.neutron [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Port 4f1807c4-68f3-46c7-939e-507baf0b7c79 binding to destination host cpu-1 is already ACTIVE {{(pid=61629) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1071.168378] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354637, 'name': CreateVM_Task, 'duration_secs': 0.285137} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.168567] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1071.169185] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1071.169390] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.169736] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1071.169988] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4481c16c-6d1a-4bdd-80ea-3b813a8f2934 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.173985] env[61629]: DEBUG oslo_vmware.api [None 
req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Waiting for the task: (returnval){ [ 1071.173985] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52ae51bc-a2d7-1c7b-7975-67342d589961" [ 1071.173985] env[61629]: _type = "Task" [ 1071.173985] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.180993] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52ae51bc-a2d7-1c7b-7975-67342d589961, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.593255] env[61629]: DEBUG nova.network.neutron [req-e88f62e4-8f9b-4e9b-aec2-d1cb1a556472 req-8c2a4549-5c30-4a05-bb8f-f62e3c1dc37c service nova] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Updated VIF entry in instance network info cache for port c58b1d68-4864-433f-abc5-85f78006fa3f. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1071.593788] env[61629]: DEBUG nova.network.neutron [req-e88f62e4-8f9b-4e9b-aec2-d1cb1a556472 req-8c2a4549-5c30-4a05-bb8f-f62e3c1dc37c service nova] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Updating instance_info_cache with network_info: [{"id": "c58b1d68-4864-433f-abc5-85f78006fa3f", "address": "fa:16:3e:4f:ac:a7", "network": {"id": "ec443f22-441f-44a3-bc81-9b1e73b9b917", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1648806816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b4107fc57f54319973ab87715e93e61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc58b1d68-48", "ovs_interfaceid": "c58b1d68-4864-433f-abc5-85f78006fa3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.683760] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52ae51bc-a2d7-1c7b-7975-67342d589961, 'name': SearchDatastore_Task, 'duration_secs': 0.010405} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.684169] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1071.684474] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1071.684754] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1071.684942] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.685174] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1071.685468] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1d04347b-2a18-4e3f-8704-f7b4c480283c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.693105] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1071.693331] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1071.694045] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c47f7f13-fd53-48d5-ba3e-2ee0c950d344 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.700341] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Waiting for the task: (returnval){ [ 1071.700341] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52a24fd0-39eb-930d-406a-b3c3309e0606" [ 1071.700341] env[61629]: _type = "Task" [ 1071.700341] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.707642] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52a24fd0-39eb-930d-406a-b3c3309e0606, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.942556] env[61629]: DEBUG oslo_concurrency.lockutils [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "843e230d-fb7e-4375-83f4-78fe6d9de9ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.942863] env[61629]: DEBUG oslo_concurrency.lockutils [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "843e230d-fb7e-4375-83f4-78fe6d9de9ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.943065] env[61629]: DEBUG oslo_concurrency.lockutils [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "843e230d-fb7e-4375-83f4-78fe6d9de9ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.096797] env[61629]: DEBUG oslo_concurrency.lockutils [req-e88f62e4-8f9b-4e9b-aec2-d1cb1a556472 req-8c2a4549-5c30-4a05-bb8f-f62e3c1dc37c service nova] Releasing lock "refresh_cache-03327851-5bf7-47f0-b0b2-ce4b763225cb" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1072.211134] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52a24fd0-39eb-930d-406a-b3c3309e0606, 'name': SearchDatastore_Task, 'duration_secs': 0.008354} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.211930] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15881ae0-4db4-4dde-9dc2-8d948c13bd85 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.217641] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Waiting for the task: (returnval){ [ 1072.217641] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5286896c-b3ff-37b1-f3ed-cc6b6e623e18" [ 1072.217641] env[61629]: _type = "Task" [ 1072.217641] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.225068] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5286896c-b3ff-37b1-f3ed-cc6b6e623e18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.727773] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5286896c-b3ff-37b1-f3ed-cc6b6e623e18, 'name': SearchDatastore_Task, 'duration_secs': 0.008915} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.728055] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1072.728323] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 03327851-5bf7-47f0-b0b2-ce4b763225cb/03327851-5bf7-47f0-b0b2-ce4b763225cb.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1072.728580] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-78be51a6-b02d-44ea-b958-f2ee34639065 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.735432] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Waiting for the task: (returnval){ [ 1072.735432] env[61629]: value = "task-1354638" [ 1072.735432] env[61629]: _type = "Task" [ 1072.735432] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.742451] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Task: {'id': task-1354638, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.981923] env[61629]: DEBUG oslo_concurrency.lockutils [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "refresh_cache-843e230d-fb7e-4375-83f4-78fe6d9de9ff" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1072.982237] env[61629]: DEBUG oslo_concurrency.lockutils [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired lock "refresh_cache-843e230d-fb7e-4375-83f4-78fe6d9de9ff" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.982394] env[61629]: DEBUG nova.network.neutron [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1073.244566] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Task: {'id': task-1354638, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.467696} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.244787] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] 03327851-5bf7-47f0-b0b2-ce4b763225cb/03327851-5bf7-47f0-b0b2-ce4b763225cb.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1073.245000] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1073.245269] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5b0b9b81-5112-44c4-96f8-867614b3bbf2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.252390] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Waiting for the task: (returnval){ [ 1073.252390] env[61629]: value = "task-1354639" [ 1073.252390] env[61629]: _type = "Task" [ 1073.252390] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.259119] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Task: {'id': task-1354639, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.661768] env[61629]: DEBUG nova.network.neutron [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Updating instance_info_cache with network_info: [{"id": "4f1807c4-68f3-46c7-939e-507baf0b7c79", "address": "fa:16:3e:ca:02:a7", "network": {"id": "03610486-2741-491e-a62d-a51579315e5a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1394073503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38efdd2cc07f45a49fb06d590aafb96b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f1807c4-68", "ovs_interfaceid": "4f1807c4-68f3-46c7-939e-507baf0b7c79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1073.762191] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Task: {'id': task-1354639, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060987} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.762423] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1073.763191] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b49592f-c335-45b8-97f3-7df449cd4dbd {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.785734] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] 03327851-5bf7-47f0-b0b2-ce4b763225cb/03327851-5bf7-47f0-b0b2-ce4b763225cb.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1073.785982] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef071265-8294-4672-87bb-5fe5046fae12 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.805090] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Waiting for the task: (returnval){ [ 1073.805090] env[61629]: value = "task-1354640" [ 1073.805090] env[61629]: _type = "Task" [ 1073.805090] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.812509] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Task: {'id': task-1354640, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.164722] env[61629]: DEBUG oslo_concurrency.lockutils [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Releasing lock "refresh_cache-843e230d-fb7e-4375-83f4-78fe6d9de9ff" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1074.314547] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Task: {'id': task-1354640, 'name': ReconfigVM_Task, 'duration_secs': 0.255783} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.314840] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Reconfigured VM instance instance-00000069 to attach disk [datastore2] 03327851-5bf7-47f0-b0b2-ce4b763225cb/03327851-5bf7-47f0-b0b2-ce4b763225cb.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1074.315473] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b7cca63-d18a-4f0f-82d6-040a5cc511a8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.321026] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Waiting for the task: (returnval){ [ 1074.321026] env[61629]: value = "task-1354641" [ 1074.321026] env[61629]: _type = "Task" [ 1074.321026] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.328068] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Task: {'id': task-1354641, 'name': Rename_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.690207] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a73c47-9457-468a-9e28-5255427a8a50 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.708677] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02390a2b-238c-4793-ba22-c90cd7779560 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.715297] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Updating instance '843e230d-fb7e-4375-83f4-78fe6d9de9ff' progress to 83 {{(pid=61629) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1074.830771] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Task: {'id': task-1354641, 'name': Rename_Task, 'duration_secs': 0.14976} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.831016] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1074.831271] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ceb3a559-29db-4d4c-96ce-9d2ba4b2bc31 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.836980] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Waiting for the task: (returnval){ [ 1074.836980] env[61629]: value = "task-1354642" [ 1074.836980] env[61629]: _type = "Task" [ 1074.836980] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.843947] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Task: {'id': task-1354642, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.221441] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1075.221964] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56e30f2d-61de-477b-be92-74334ca72188 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.228979] env[61629]: DEBUG oslo_vmware.api [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1075.228979] env[61629]: value = "task-1354643" [ 1075.228979] env[61629]: _type = "Task" [ 1075.228979] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.242198] env[61629]: DEBUG oslo_vmware.api [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354643, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.346619] env[61629]: DEBUG oslo_vmware.api [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Task: {'id': task-1354642, 'name': PowerOnVM_Task, 'duration_secs': 0.410923} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.346905] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1075.347131] env[61629]: INFO nova.compute.manager [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Took 6.66 seconds to spawn the instance on the hypervisor. [ 1075.347352] env[61629]: DEBUG nova.compute.manager [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1075.348110] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f847dd9-4899-4071-9b91-5dbc39b2478a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.739132] env[61629]: DEBUG oslo_vmware.api [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354643, 'name': PowerOnVM_Task, 'duration_secs': 0.432965} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.739448] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1075.739647] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-641a530e-79b6-4c05-8dc0-76a30fa08f91 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Updating instance '843e230d-fb7e-4375-83f4-78fe6d9de9ff' progress to 100 {{(pid=61629) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1075.863893] env[61629]: INFO nova.compute.manager [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Took 12.36 seconds to build instance. 
[ 1076.366520] env[61629]: DEBUG oslo_concurrency.lockutils [None req-ad8b12a5-7b6d-4b34-8833-686aa94bb4c1 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Lock "03327851-5bf7-47f0-b0b2-ce4b763225cb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.873s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.627787] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Acquiring lock "03327851-5bf7-47f0-b0b2-ce4b763225cb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1076.628167] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Lock "03327851-5bf7-47f0-b0b2-ce4b763225cb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1076.628405] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Acquiring lock "03327851-5bf7-47f0-b0b2-ce4b763225cb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1076.628618] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Lock "03327851-5bf7-47f0-b0b2-ce4b763225cb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1076.628802] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Lock "03327851-5bf7-47f0-b0b2-ce4b763225cb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.631181] env[61629]: INFO nova.compute.manager [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Terminating instance [ 1076.633094] env[61629]: DEBUG nova.compute.manager [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1076.633331] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1076.634259] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98a5a9f-8b2c-46bc-a3c0-96fba0e346b1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.642562] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1076.642816] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f5f74a7-033e-406b-b80a-313b46aa00a7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.649285] env[61629]: DEBUG oslo_vmware.api [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Waiting for the task: (returnval){ [ 1076.649285] env[61629]: value = "task-1354644" [ 1076.649285] env[61629]: _type = "Task" [ 1076.649285] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.657514] env[61629]: DEBUG oslo_vmware.api [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Task: {'id': task-1354644, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.159764] env[61629]: DEBUG oslo_vmware.api [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Task: {'id': task-1354644, 'name': PowerOffVM_Task, 'duration_secs': 0.219987} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.160036] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1077.160215] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1077.160472] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-03b79644-3a0c-4383-9164-183e0bcdfd1d {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.227604] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1077.227951] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1077.228267] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Deleting the datastore file [datastore2] 03327851-5bf7-47f0-b0b2-ce4b763225cb {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1077.228557] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c00a5eb0-c402-4f16-91d8-ff2c5ca33191 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.234606] env[61629]: DEBUG oslo_vmware.api [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Waiting for the task: (returnval){ [ 1077.234606] env[61629]: value = "task-1354646" [ 1077.234606] env[61629]: _type = "Task" [ 1077.234606] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.242760] env[61629]: DEBUG oslo_vmware.api [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Task: {'id': task-1354646, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.745685] env[61629]: DEBUG oslo_vmware.api [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Task: {'id': task-1354646, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132318} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.746140] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1077.746289] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1077.746479] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1077.746659] env[61629]: INFO nova.compute.manager [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1077.746911] env[61629]: DEBUG oslo.service.loopingcall [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1077.747129] env[61629]: DEBUG nova.compute.manager [-] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1077.747269] env[61629]: DEBUG nova.network.neutron [-] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1078.030302] env[61629]: DEBUG nova.compute.manager [req-29312cf5-88fb-40c5-a9df-41f7b13fceb7 req-b3a99ef8-cf48-4fde-b723-195d842d4b5d service nova] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Received event network-vif-deleted-c58b1d68-4864-433f-abc5-85f78006fa3f {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1078.030566] env[61629]: INFO nova.compute.manager [req-29312cf5-88fb-40c5-a9df-41f7b13fceb7 req-b3a99ef8-cf48-4fde-b723-195d842d4b5d service nova] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Neutron deleted interface c58b1d68-4864-433f-abc5-85f78006fa3f; detaching it from the instance and deleting it from the info cache [ 1078.030760] env[61629]: DEBUG nova.network.neutron [req-29312cf5-88fb-40c5-a9df-41f7b13fceb7 req-b3a99ef8-cf48-4fde-b723-195d842d4b5d service nova] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.500605] env[61629]: DEBUG nova.network.neutron [-] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.532696] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-558f9d42-775c-42d8-a302-ccc39159cf30 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.542877] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09929076-be75-4261-894e-afb69507d420 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.565802] env[61629]: DEBUG nova.compute.manager [req-29312cf5-88fb-40c5-a9df-41f7b13fceb7 req-b3a99ef8-cf48-4fde-b723-195d842d4b5d service nova] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Detach interface failed, port_id=c58b1d68-4864-433f-abc5-85f78006fa3f, reason: Instance 03327851-5bf7-47f0-b0b2-ce4b763225cb could not be found. 
{{(pid=61629) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1078.579718] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "843e230d-fb7e-4375-83f4-78fe6d9de9ff" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.579964] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "843e230d-fb7e-4375-83f4-78fe6d9de9ff" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.580174] env[61629]: DEBUG nova.compute.manager [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Going to confirm migration 2 {{(pid=61629) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1079.003349] env[61629]: INFO nova.compute.manager [-] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Took 1.26 seconds to deallocate network for instance. [ 1079.144668] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "refresh_cache-843e230d-fb7e-4375-83f4-78fe6d9de9ff" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1079.144868] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquired lock "refresh_cache-843e230d-fb7e-4375-83f4-78fe6d9de9ff" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.145061] env[61629]: DEBUG nova.network.neutron [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1079.145257] env[61629]: DEBUG nova.objects.instance [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lazy-loading 'info_cache' on Instance uuid 843e230d-fb7e-4375-83f4-78fe6d9de9ff {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1079.509604] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1079.509888] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 
tempest-ServerGroupTestJSON-1476649875-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.510129] env[61629]: DEBUG nova.objects.instance [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Lazy-loading 'resources' on Instance uuid 03327851-5bf7-47f0-b0b2-ce4b763225cb {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1080.064216] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d40309b-e7b1-4f37-9ff4-fdca3abc08d0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.071873] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af15aa0-0230-4ed4-ab6d-3fe65dae3164 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.101675] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46dcca7-6ce8-44bc-b732-17fbd269c4d6 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.108557] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41dc3bb3-bbcf-4a8b-83d4-6020742bafe0 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.121279] env[61629]: DEBUG nova.compute.provider_tree [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1080.332828] env[61629]: DEBUG nova.network.neutron [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Updating instance_info_cache with network_info: [{"id": "4f1807c4-68f3-46c7-939e-507baf0b7c79", "address": "fa:16:3e:ca:02:a7", "network": {"id": "03610486-2741-491e-a62d-a51579315e5a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1394073503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38efdd2cc07f45a49fb06d590aafb96b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f1807c4-68", "ovs_interfaceid": "4f1807c4-68f3-46c7-939e-507baf0b7c79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": 
true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.624293] env[61629]: DEBUG nova.scheduler.client.report [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1080.835304] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Releasing lock "refresh_cache-843e230d-fb7e-4375-83f4-78fe6d9de9ff" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1080.835542] env[61629]: DEBUG nova.objects.instance [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lazy-loading 'migration_context' on Instance uuid 843e230d-fb7e-4375-83f4-78fe6d9de9ff {{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1081.129976] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.620s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.151943] env[61629]: INFO nova.scheduler.client.report [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Deleted allocations for instance 03327851-5bf7-47f0-b0b2-ce4b763225cb [ 1081.338954] env[61629]: DEBUG nova.objects.base [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Object Instance<843e230d-fb7e-4375-83f4-78fe6d9de9ff> lazy-loaded attributes: info_cache,migration_context {{(pid=61629) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1081.340068] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd1db9d-9155-4459-8279-08a145a1bead {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.359254] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7d98185-ccc3-4ec4-91cb-3196ac809f63 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.364859] env[61629]: DEBUG oslo_vmware.api [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1081.364859] env[61629]: value = 
"session[52e40533-c391-fb92-ed9b-bc3746674c24]525ce424-a8d2-8414-e195-77db387ddf49" [ 1081.364859] env[61629]: _type = "Task" [ 1081.364859] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.373970] env[61629]: DEBUG oslo_vmware.api [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]525ce424-a8d2-8414-e195-77db387ddf49, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.660238] env[61629]: DEBUG oslo_concurrency.lockutils [None req-c0ee5293-a104-4262-ae46-b9f60569c1d0 tempest-ServerGroupTestJSON-1476649875 tempest-ServerGroupTestJSON-1476649875-project-member] Lock "03327851-5bf7-47f0-b0b2-ce4b763225cb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.031s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.874836] env[61629]: DEBUG oslo_vmware.api [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]525ce424-a8d2-8414-e195-77db387ddf49, 'name': SearchDatastore_Task, 'duration_secs': 0.009713} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.875156] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.875392] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.424562] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57affe4a-9157-4134-9284-6a0fbb2c9184 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.432698] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f363cbb5-e740-4452-bade-e21c76abd8db {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.465876] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f3e3f9-efa8-47af-87f7-ef832b94c332 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.473434] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f1f4edc-8e82-4c65-b833-a05008f6cf77 {{(pid=61629) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.486150] env[61629]: DEBUG nova.compute.provider_tree [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.990050] env[61629]: DEBUG nova.scheduler.client.report [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1084.001736] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.126s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.560080] env[61629]: INFO nova.scheduler.client.report [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Deleted allocation for migration c8908495-b24e-45fe-bf2f-930f99e91822 [ 1085.067251] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "843e230d-fb7e-4375-83f4-78fe6d9de9ff" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.487s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.062806] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Acquiring lock "ead4b4b6-05e9-404e-9127-90b3395e47e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.062806] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Lock "ead4b4b6-05e9-404e-9127-90b3395e47e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.106447] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 
tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "843e230d-fb7e-4375-83f4-78fe6d9de9ff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.106753] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "843e230d-fb7e-4375-83f4-78fe6d9de9ff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.107597] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "843e230d-fb7e-4375-83f4-78fe6d9de9ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.107597] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "843e230d-fb7e-4375-83f4-78fe6d9de9ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.107597] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "843e230d-fb7e-4375-83f4-78fe6d9de9ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.109353] env[61629]: INFO nova.compute.manager [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Terminating instance [ 1086.111178] env[61629]: DEBUG nova.compute.manager [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1086.111383] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1086.112238] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e21029-cb02-425a-80bc-14f689d6cbfe {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.120065] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1086.120311] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aa6177b6-3226-4185-bb45-a17c8973f224 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.126278] env[61629]: DEBUG oslo_vmware.api [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1086.126278] env[61629]: value = "task-1354647" [ 1086.126278] env[61629]: _type = "Task" [ 1086.126278] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.134601] env[61629]: DEBUG oslo_vmware.api [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354647, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.565405] env[61629]: DEBUG nova.compute.manager [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Starting instance... {{(pid=61629) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1086.636415] env[61629]: DEBUG oslo_vmware.api [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354647, 'name': PowerOffVM_Task, 'duration_secs': 0.196744} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.636688] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1086.636859] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1086.637121] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-526c26e3-209d-4fa2-afa3-13619e04ac73 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.929021] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1086.929263] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Deleting contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1086.929547] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Deleting the datastore file [datastore2] 843e230d-fb7e-4375-83f4-78fe6d9de9ff {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1086.929821] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c93ccbc-17fb-465c-874a-184037e9a872 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.935822] env[61629]: DEBUG oslo_vmware.api [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for the task: (returnval){ [ 1086.935822] env[61629]: value = "task-1354649" [ 1086.935822] env[61629]: _type = "Task" [ 1086.935822] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.944733] env[61629]: DEBUG oslo_vmware.api [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354649, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.088082] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.088363] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1087.089904] env[61629]: INFO nova.compute.claims [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1087.446749] env[61629]: DEBUG oslo_vmware.api [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Task: {'id': task-1354649, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13575} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.447119] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1087.447204] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1087.447414] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1087.447601] env[61629]: INFO nova.compute.manager [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Took 1.34 seconds to destroy the instance on the hypervisor. [ 1087.447844] env[61629]: DEBUG oslo.service.loopingcall [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1087.448052] env[61629]: DEBUG nova.compute.manager [-] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1087.448147] env[61629]: DEBUG nova.network.neutron [-] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1087.706719] env[61629]: DEBUG nova.compute.manager [req-b24babfc-6835-47f0-a39e-169f9687f18b req-b2b3ae1c-4d18-4484-9511-579e8b2f3818 service nova] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Received event network-vif-deleted-4f1807c4-68f3-46c7-939e-507baf0b7c79 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1087.706947] env[61629]: INFO nova.compute.manager [req-b24babfc-6835-47f0-a39e-169f9687f18b req-b2b3ae1c-4d18-4484-9511-579e8b2f3818 service nova] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Neutron deleted interface 4f1807c4-68f3-46c7-939e-507baf0b7c79; detaching it from the instance and deleting it from the info cache [ 1087.707123] env[61629]: DEBUG nova.network.neutron [req-b24babfc-6835-47f0-a39e-169f9687f18b req-b2b3ae1c-4d18-4484-9511-579e8b2f3818 service nova] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.135616] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd6d0dc-beb2-4bdf-9662-f1bf32a764c5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.143095] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e269c089-43f2-4c4a-8876-9959e9b904e9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.172834] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efda95cf-4700-49e8-9d77-ab6cdb2bec86 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.180387] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8095c89-3a91-4998-8d56-bdab07bea714 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.192663] env[61629]: DEBUG nova.network.neutron [-] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.194432] env[61629]: DEBUG nova.compute.provider_tree [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1088.209558] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2963e8a4-8e03-4f3c-8c70-b5cd5ca2014c {{(pid=61629) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.218855] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a61e7f-edec-4bc2-9c90-8ace259662cc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.242156] env[61629]: DEBUG nova.compute.manager [req-b24babfc-6835-47f0-a39e-169f9687f18b req-b2b3ae1c-4d18-4484-9511-579e8b2f3818 service nova] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Detach interface failed, port_id=4f1807c4-68f3-46c7-939e-507baf0b7c79, reason: Instance 843e230d-fb7e-4375-83f4-78fe6d9de9ff could not be found. {{(pid=61629) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1088.696663] env[61629]: INFO nova.compute.manager [-] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Took 1.25 seconds to deallocate network for instance. [ 1088.699631] env[61629]: DEBUG nova.scheduler.client.report [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1089.206688] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.118s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.208028] env[61629]: DEBUG nova.compute.manager [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Start building networks asynchronously for instance. 
{{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1089.211025] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.211284] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.211477] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.229667] env[61629]: INFO nova.scheduler.client.report [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Deleted allocations for instance 843e230d-fb7e-4375-83f4-78fe6d9de9ff [ 1089.684828] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.685081] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.716064] env[61629]: DEBUG nova.compute.utils [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Using /dev/sd instead of None {{(pid=61629) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1089.718084] env[61629]: DEBUG nova.compute.manager [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Allocating IP information in the background. 
{{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1089.718084] env[61629]: DEBUG nova.network.neutron [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] allocate_for_instance() {{(pid=61629) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1089.736997] env[61629]: DEBUG oslo_concurrency.lockutils [None req-e6c6dba1-c020-4c40-ab27-63ed44157fc7 tempest-DeleteServersTestJSON-2006748920 tempest-DeleteServersTestJSON-2006748920-project-member] Lock "843e230d-fb7e-4375-83f4-78fe6d9de9ff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.630s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.763794] env[61629]: DEBUG nova.policy [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8344ec002a44d2f9f8d5f28e13b5fdc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65277daeb9e34c0b885893cd19d978b9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61629) authorize /opt/stack/nova/nova/policy.py:201}} [ 1090.003497] env[61629]: DEBUG nova.network.neutron [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Successfully created port: f6f32347-1a23-4769-af62-856db20d9ff7 {{(pid=61629) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1090.189334] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1090.189517] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Starting heal instance info cache {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1090.189636] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Rebuilding the list of instances to heal {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1090.221645] env[61629]: DEBUG nova.compute.manager [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Start building block device mappings for instance. {{(pid=61629) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1090.692578] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Skipping network cache update for instance because it is Building. 
{{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1090.692817] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Didn't find any instances for network info cache update. {{(pid=61629) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1090.692967] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1090.693141] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1090.693304] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1090.693466] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1090.693603] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1090.693743] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1090.693873] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61629) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1090.694020] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager.update_available_resource {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1091.197064] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.197475] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.197522] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.197657] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61629) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1091.198799] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8488efef-5f12-46a5-a51e-4ca81808b004 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.207089] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df367cc5-0341-476e-b914-351ba03f12f2 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.221579] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d12ead-736e-432a-9d6e-9945d0320a57 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.227739] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89af66e4-913c-4cae-99ee-f328eb95a708 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.231765] env[61629]: DEBUG nova.compute.manager [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Start spawning the instance on the hypervisor. 
{{(pid=61629) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1091.262043] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180823MB free_disk=151GB free_vcpus=48 pci_devices=None {{(pid=61629) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1091.262043] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.262043] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.271891] env[61629]: DEBUG nova.virt.hardware [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-24T12:53:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-24T12:53:37Z,direct_url=,disk_format='vmdk',id=7f036972-f3d8-47df-ae86-f8f2844bf80c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9a36b70b3bef49e68cbe43ec3eaa5dc8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-24T12:53:38Z,virtual_size=,visibility=), allow threads: False {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1091.272153] env[61629]: DEBUG nova.virt.hardware [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Flavor limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1091.272318] env[61629]: DEBUG nova.virt.hardware [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Image limits 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1091.272509] env[61629]: DEBUG nova.virt.hardware [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Flavor pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1091.272686] env[61629]: DEBUG nova.virt.hardware [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Image pref 0:0:0 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1091.272840] env[61629]: DEBUG 
nova.virt.hardware [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61629) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1091.273074] env[61629]: DEBUG nova.virt.hardware [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1091.273242] env[61629]: DEBUG nova.virt.hardware [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1091.273415] env[61629]: DEBUG nova.virt.hardware [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Got 1 possible topologies {{(pid=61629) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1091.273642] env[61629]: DEBUG nova.virt.hardware [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1091.273753] env[61629]: DEBUG nova.virt.hardware [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61629) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1091.274962] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f59da4-49a6-4245-a6aa-f94cfb2ef18b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.283614] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f898801b-f8ef-46ac-b058-c328f78f2472 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.387938] env[61629]: DEBUG nova.compute.manager [req-2f8cc343-2756-4209-92d2-d8a455e4bae3 req-5157c965-9b9d-4669-8156-bfb377b8c69a service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Received event network-vif-plugged-f6f32347-1a23-4769-af62-856db20d9ff7 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1091.388397] env[61629]: DEBUG oslo_concurrency.lockutils [req-2f8cc343-2756-4209-92d2-d8a455e4bae3 req-5157c965-9b9d-4669-8156-bfb377b8c69a service nova] Acquiring lock "ead4b4b6-05e9-404e-9127-90b3395e47e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.388564] env[61629]: DEBUG oslo_concurrency.lockutils 
[req-2f8cc343-2756-4209-92d2-d8a455e4bae3 req-5157c965-9b9d-4669-8156-bfb377b8c69a service nova] Lock "ead4b4b6-05e9-404e-9127-90b3395e47e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.388706] env[61629]: DEBUG oslo_concurrency.lockutils [req-2f8cc343-2756-4209-92d2-d8a455e4bae3 req-5157c965-9b9d-4669-8156-bfb377b8c69a service nova] Lock "ead4b4b6-05e9-404e-9127-90b3395e47e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.388892] env[61629]: DEBUG nova.compute.manager [req-2f8cc343-2756-4209-92d2-d8a455e4bae3 req-5157c965-9b9d-4669-8156-bfb377b8c69a service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] No waiting events found dispatching network-vif-plugged-f6f32347-1a23-4769-af62-856db20d9ff7 {{(pid=61629) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1091.389082] env[61629]: WARNING nova.compute.manager [req-2f8cc343-2756-4209-92d2-d8a455e4bae3 req-5157c965-9b9d-4669-8156-bfb377b8c69a service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Received unexpected event network-vif-plugged-f6f32347-1a23-4769-af62-856db20d9ff7 for instance with vm_state building and task_state spawning. [ 1091.468223] env[61629]: DEBUG nova.network.neutron [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Successfully updated port: f6f32347-1a23-4769-af62-856db20d9ff7 {{(pid=61629) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1091.970787] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Acquiring lock "refresh_cache-ead4b4b6-05e9-404e-9127-90b3395e47e7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1091.971014] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Acquired lock "refresh_cache-ead4b4b6-05e9-404e-9127-90b3395e47e7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.971100] env[61629]: DEBUG nova.network.neutron [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1092.369350] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Instance ead4b4b6-05e9-404e-9127-90b3395e47e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61629) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1092.369593] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=61629) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1092.369729] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=61629) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1092.397055] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f431e5e2-5d16-4b69-a0c7-9f914965012e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.406639] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc69222-2920-4824-8821-10fe4b2dab7b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.435963] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc06495-55a5-426d-a804-8bf1c6ee06eb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.443214] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec7dfa07-fa11-43ac-b28b-b3304c815466 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.455715] env[61629]: DEBUG nova.compute.provider_tree [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1092.502848] env[61629]: DEBUG nova.network.neutron [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Instance cache missing network info. 
{{(pid=61629) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1092.615259] env[61629]: DEBUG nova.network.neutron [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Updating instance_info_cache with network_info: [{"id": "f6f32347-1a23-4769-af62-856db20d9ff7", "address": "fa:16:3e:45:78:b9", "network": {"id": "6d42a9ec-1547-45b7-81be-bce10dd9eef0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-853038587-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "65277daeb9e34c0b885893cd19d978b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6f32347-1a", "ovs_interfaceid": "f6f32347-1a23-4769-af62-856db20d9ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.958942] env[61629]: DEBUG nova.scheduler.client.report [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1093.118427] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Releasing lock "refresh_cache-ead4b4b6-05e9-404e-9127-90b3395e47e7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1093.118779] env[61629]: DEBUG nova.compute.manager [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Instance network_info: |[{"id": "f6f32347-1a23-4769-af62-856db20d9ff7", "address": "fa:16:3e:45:78:b9", "network": {"id": "6d42a9ec-1547-45b7-81be-bce10dd9eef0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-853038587-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": 
false, "tenant_id": "65277daeb9e34c0b885893cd19d978b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6f32347-1a", "ovs_interfaceid": "f6f32347-1a23-4769-af62-856db20d9ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61629) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1093.119342] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:78:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '816c6e38-e200-4544-8c5b-9fc3e16c5761', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f6f32347-1a23-4769-af62-856db20d9ff7', 'vif_model': 'vmxnet3'}] {{(pid=61629) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1093.126721] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Creating folder: Project (65277daeb9e34c0b885893cd19d978b9). Parent ref: group-v288443. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1093.126987] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ea6f92e7-275a-4704-b3c9-e888c184ba51 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.139362] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Created folder: Project (65277daeb9e34c0b885893cd19d978b9) in parent group-v288443. [ 1093.139554] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Creating folder: Instances. Parent ref: group-v288589. {{(pid=61629) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1093.139779] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-42cebb44-8f52-4ebb-b5ca-aa09297a54e7 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.148590] env[61629]: INFO nova.virt.vmwareapi.vm_util [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Created folder: Instances in parent group-v288589. [ 1093.148818] env[61629]: DEBUG oslo.service.loopingcall [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1093.149013] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Creating VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1093.149220] env[61629]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4816412-039d-4b6b-8786-2f7018e19290 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.167796] env[61629]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1093.167796] env[61629]: value = "task-1354653" [ 1093.167796] env[61629]: _type = "Task" [ 1093.167796] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.174873] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354653, 'name': CreateVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.414153] env[61629]: DEBUG nova.compute.manager [req-bbea453b-6a44-43d7-aaa7-aa674c311e5a req-df66ea8e-b125-4be1-84d0-aaf327be8c1c service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Received event network-changed-f6f32347-1a23-4769-af62-856db20d9ff7 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1093.414153] env[61629]: DEBUG nova.compute.manager [req-bbea453b-6a44-43d7-aaa7-aa674c311e5a req-df66ea8e-b125-4be1-84d0-aaf327be8c1c service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Refreshing instance network info cache due to event network-changed-f6f32347-1a23-4769-af62-856db20d9ff7. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1093.414153] env[61629]: DEBUG oslo_concurrency.lockutils [req-bbea453b-6a44-43d7-aaa7-aa674c311e5a req-df66ea8e-b125-4be1-84d0-aaf327be8c1c service nova] Acquiring lock "refresh_cache-ead4b4b6-05e9-404e-9127-90b3395e47e7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.414153] env[61629]: DEBUG oslo_concurrency.lockutils [req-bbea453b-6a44-43d7-aaa7-aa674c311e5a req-df66ea8e-b125-4be1-84d0-aaf327be8c1c service nova] Acquired lock "refresh_cache-ead4b4b6-05e9-404e-9127-90b3395e47e7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.414472] env[61629]: DEBUG nova.network.neutron [req-bbea453b-6a44-43d7-aaa7-aa674c311e5a req-df66ea8e-b125-4be1-84d0-aaf327be8c1c service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Refreshing network info cache for port f6f32347-1a23-4769-af62-856db20d9ff7 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1093.463550] env[61629]: DEBUG nova.compute.resource_tracker [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61629) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1093.463824] env[61629]: DEBUG oslo_concurrency.lockutils [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.202s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.464078] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.464223] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Cleaning up deleted instances {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1093.681802] env[61629]: DEBUG oslo_vmware.api [-] Task: {'id': task-1354653, 'name': CreateVM_Task, 'duration_secs': 0.288164} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.681802] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Created VM on the ESX host {{(pid=61629) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1093.681802] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.681802] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.681802] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1093.681802] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6eab017-00dd-4401-a94c-7ec8f8d5b167 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.687017] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Waiting for the task: (returnval){ [ 1093.687017] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52db01ef-4cbe-b289-5076-acd1a92340d7" [ 1093.687017] env[61629]: _type = "Task" [ 1093.687017] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.691849] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52db01ef-4cbe-b289-5076-acd1a92340d7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.976329] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] There are 43 instances to clean {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 1093.976614] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 03327851-5bf7-47f0-b0b2-ce4b763225cb] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1094.114264] env[61629]: DEBUG nova.network.neutron [req-bbea453b-6a44-43d7-aaa7-aa674c311e5a req-df66ea8e-b125-4be1-84d0-aaf327be8c1c service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Updated VIF entry in instance network info cache for port f6f32347-1a23-4769-af62-856db20d9ff7. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1094.114618] env[61629]: DEBUG nova.network.neutron [req-bbea453b-6a44-43d7-aaa7-aa674c311e5a req-df66ea8e-b125-4be1-84d0-aaf327be8c1c service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Updating instance_info_cache with network_info: [{"id": "f6f32347-1a23-4769-af62-856db20d9ff7", "address": "fa:16:3e:45:78:b9", "network": {"id": "6d42a9ec-1547-45b7-81be-bce10dd9eef0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-853038587-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "65277daeb9e34c0b885893cd19d978b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6f32347-1a", "ovs_interfaceid": "f6f32347-1a23-4769-af62-856db20d9ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.194497] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52db01ef-4cbe-b289-5076-acd1a92340d7, 'name': SearchDatastore_Task, 'duration_secs': 0.008353} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.194834] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1094.195086] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1094.195327] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1094.195504] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.195706] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1094.195993] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ebb5801c-9255-4cbf-aa31-7468f4c0e73b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.203775] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1094.203959] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1094.204680] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-589bb667-e338-4178-adf3-5cea200750d8 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.209718] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Waiting for the task: (returnval){ [ 1094.209718] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]5262dd08-2a52-6826-5da4-b0f856d9d5ad" [ 1094.209718] env[61629]: _type = "Task" [ 1094.209718] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.217264] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5262dd08-2a52-6826-5da4-b0f856d9d5ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.479850] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 843e230d-fb7e-4375-83f4-78fe6d9de9ff] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1094.618217] env[61629]: DEBUG oslo_concurrency.lockutils [req-bbea453b-6a44-43d7-aaa7-aa674c311e5a req-df66ea8e-b125-4be1-84d0-aaf327be8c1c service nova] Releasing lock "refresh_cache-ead4b4b6-05e9-404e-9127-90b3395e47e7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1094.720580] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]5262dd08-2a52-6826-5da4-b0f856d9d5ad, 'name': SearchDatastore_Task, 'duration_secs': 0.008305} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.721445] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7003ac84-b1e0-4f3c-9e20-3491a01e645e {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.726671] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Waiting for the task: (returnval){ [ 1094.726671] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]522f0478-5bc6-edca-25a6-6235d337f475" [ 1094.726671] env[61629]: _type = "Task" [ 1094.726671] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.735716] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]522f0478-5bc6-edca-25a6-6235d337f475, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.983329] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 88cf532c-b13b-4c27-8637-d24bb6d73b82] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1095.236652] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]522f0478-5bc6-edca-25a6-6235d337f475, 'name': SearchDatastore_Task, 'duration_secs': 0.026891} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.236956] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1095.237217] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] ead4b4b6-05e9-404e-9127-90b3395e47e7/ead4b4b6-05e9-404e-9127-90b3395e47e7.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1095.238029] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-156f3057-5923-45f2-996b-01bccb693c18 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.244147] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Waiting for the task: (returnval){ [ 1095.244147] env[61629]: value = "task-1354654" [ 1095.244147] env[61629]: _type = "Task" [ 1095.244147] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.253298] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354654, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.486605] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 3b116c59-a904-4b68-9c74-58954b3de240] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1095.754955] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354654, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501947} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.755272] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] ead4b4b6-05e9-404e-9127-90b3395e47e7/ead4b4b6-05e9-404e-9127-90b3395e47e7.vmdk {{(pid=61629) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1095.755462] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Extending root virtual disk to 1048576 {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1095.755794] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5aaf7143-3fba-4134-a56a-021459b811c5 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.763027] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Waiting for the task: (returnval){ [ 1095.763027] env[61629]: value = "task-1354655" [ 1095.763027] env[61629]: _type = "Task" [ 1095.763027] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.773239] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354655, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.989972] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: c99de956-c382-4203-b2a7-d3f8709d188a] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1096.273093] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354655, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.200048} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.273389] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Extended root virtual disk {{(pid=61629) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1096.274184] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efef82a1-e4af-473d-b9b9-cac21a6eb36c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.295536] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] ead4b4b6-05e9-404e-9127-90b3395e47e7/ead4b4b6-05e9-404e-9127-90b3395e47e7.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1096.295826] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55c2e06d-5b11-41be-b528-6732cc9c154f {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.315409] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Waiting for the task: (returnval){ [ 1096.315409] env[61629]: value = "task-1354656" [ 1096.315409] env[61629]: _type = "Task" [ 1096.315409] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.323229] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354656, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.493185] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 7da77dea-fea2-43a6-a98a-6c492d1a041b] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1096.824867] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354656, 'name': ReconfigVM_Task, 'duration_secs': 0.284554} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.825175] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Reconfigured VM instance instance-0000006a to attach disk [datastore2] ead4b4b6-05e9-404e-9127-90b3395e47e7/ead4b4b6-05e9-404e-9127-90b3395e47e7.vmdk or device None with type sparse {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1096.825781] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-26f46878-7d20-49fd-b60f-11dce7793a2a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.832964] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Waiting for the task: (returnval){ [ 1096.832964] env[61629]: value = "task-1354657" [ 1096.832964] env[61629]: _type = "Task" [ 1096.832964] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.839889] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354657, 'name': Rename_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.997133] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: a83f05b7-f998-4f45-afc1-836fae7c4b95] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1097.342827] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354657, 'name': Rename_Task, 'duration_secs': 0.129553} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.343077] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1097.343326] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7e36deb-3625-467c-a668-c9a9662e73fc {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.348696] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Waiting for the task: (returnval){ [ 1097.348696] env[61629]: value = "task-1354658" [ 1097.348696] env[61629]: _type = "Task" [ 1097.348696] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.355554] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354658, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.500631] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 9d509d6e-d58f-4ac4-bd7f-c4bd2a448ae4] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1097.859838] env[61629]: DEBUG oslo_vmware.api [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354658, 'name': PowerOnVM_Task, 'duration_secs': 0.443822} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.860150] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1097.860388] env[61629]: INFO nova.compute.manager [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Took 6.63 seconds to spawn the instance on the hypervisor. [ 1097.860600] env[61629]: DEBUG nova.compute.manager [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1097.861376] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a54ec248-a343-4afe-8ca6-88e61b83e2cb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.004481] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: d95162d0-cc5e-4516-b76e-8d7736be1032] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1098.376520] env[61629]: INFO nova.compute.manager [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Took 11.31 seconds to build instance. 
[ 1098.507135] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 0daebf05-e42b-49c5-aa24-43304a1c3cc0] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1098.878125] env[61629]: DEBUG oslo_concurrency.lockutils [None req-f4551ee9-7361-4d58-93dd-5f89e13ef87b tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Lock "ead4b4b6-05e9-404e-9127-90b3395e47e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.815s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.010139] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 22f71f92-ca9a-4b97-a652-3f34a0dabde2] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1099.213840] env[61629]: INFO nova.compute.manager [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Rescuing [ 1099.214092] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Acquiring lock "refresh_cache-ead4b4b6-05e9-404e-9127-90b3395e47e7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.214258] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Acquired lock "refresh_cache-ead4b4b6-05e9-404e-9127-90b3395e47e7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.214430] env[61629]: DEBUG nova.network.neutron [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Building network info cache for instance {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1099.513447] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 3994458a-195a-478c-b6d0-d8e36df989a3] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1099.882402] env[61629]: DEBUG nova.network.neutron [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Updating instance_info_cache with network_info: [{"id": "f6f32347-1a23-4769-af62-856db20d9ff7", "address": "fa:16:3e:45:78:b9", "network": {"id": "6d42a9ec-1547-45b7-81be-bce10dd9eef0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-853038587-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": 
{}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "65277daeb9e34c0b885893cd19d978b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6f32347-1a", "ovs_interfaceid": "f6f32347-1a23-4769-af62-856db20d9ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.016611] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 87172592-f557-467f-ace2-805fd822681d] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1100.384993] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Releasing lock "refresh_cache-ead4b4b6-05e9-404e-9127-90b3395e47e7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1100.522655] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 3e00b9b5-32a6-4c2a-b0ae-6e00a4654f52] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1100.919937] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1100.920251] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-016f304d-1e34-4ab2-a38e-994ab71ca485 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.927646] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Waiting for the task: (returnval){ [ 1100.927646] env[61629]: value = "task-1354659" [ 1100.927646] env[61629]: _type = "Task" [ 1100.927646] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.935285] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354659, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.026093] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: ff13a9d6-8c1b-4d37-a4a7-ec03bd87550e] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1101.437868] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354659, 'name': PowerOffVM_Task, 'duration_secs': 0.187216} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.438114] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1101.438871] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e1a239-4656-48a6-95f2-bf942e9511e9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.456205] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49bd6b65-cf0c-4244-abfa-3fa7ce01b288 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.481620] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1101.481894] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f19e64d7-f640-4267-9e13-a428a4992063 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.487694] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Waiting for the task: (returnval){ [ 1101.487694] env[61629]: value = "task-1354660" [ 1101.487694] env[61629]: _type = "Task" [ 1101.487694] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.494953] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354660, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.529483] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 2ce60374-7baf-4d27-afbd-dcfaf6600a78] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1101.997535] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] VM already powered off {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1101.997792] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Processing image 7f036972-f3d8-47df-ae86-f8f2844bf80c {{(pid=61629) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1101.998018] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1101.998185] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.998386] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1101.998652] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f05d024-80e5-41e1-b6b0-883c59569990 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.007144] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61629) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1102.007335] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61629) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1102.008008] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a321b63-04d4-4764-ab7b-ac54e71af4b9 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.012549] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Waiting for the task: (returnval){ [ 1102.012549] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]52a47afa-1a1f-5ea1-0b0f-40aa23e0467d" [ 1102.012549] env[61629]: _type = "Task" [ 1102.012549] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.019453] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52a47afa-1a1f-5ea1-0b0f-40aa23e0467d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.031934] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 459c5f25-8fb1-4e43-8f7f-359a7ff697f2] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1102.523355] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]52a47afa-1a1f-5ea1-0b0f-40aa23e0467d, 'name': SearchDatastore_Task, 'duration_secs': 0.008586} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.524078] env[61629]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f35fd898-f284-4d5e-81f5-d4a9fae40b5c {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.528701] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Waiting for the task: (returnval){ [ 1102.528701] env[61629]: value = "session[52e40533-c391-fb92-ed9b-bc3746674c24]529e8c69-1e2e-d5a2-75fc-f49df7968582" [ 1102.528701] env[61629]: _type = "Task" [ 1102.528701] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.535650] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: c5b6f6b8-587c-4b74-bc83-98dac319b15b] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1102.537348] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]529e8c69-1e2e-d5a2-75fc-f49df7968582, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.038645] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: cd165a78-21f9-4fc7-88e5-5ab35047eacc] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1103.040529] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': session[52e40533-c391-fb92-ed9b-bc3746674c24]529e8c69-1e2e-d5a2-75fc-f49df7968582, 'name': SearchDatastore_Task, 'duration_secs': 0.008425} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.040967] env[61629]: DEBUG oslo_concurrency.lockutils [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1103.041234] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] ead4b4b6-05e9-404e-9127-90b3395e47e7/7f036972-f3d8-47df-ae86-f8f2844bf80c-rescue.vmdk. {{(pid=61629) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1103.041483] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0dd53a39-32f6-443e-a551-8e6d843ba634 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.049909] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Waiting for the task: (returnval){ [ 1103.049909] env[61629]: value = "task-1354661" [ 1103.049909] env[61629]: _type = "Task" [ 1103.049909] env[61629]: } to complete. 
{{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.057054] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354661, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.541666] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 2d3bc46e-4258-4b6c-b0ab-5c9b1dc3900b] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1103.559915] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354661, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.435126} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.560153] env[61629]: INFO nova.virt.vmwareapi.ds_util [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/7f036972-f3d8-47df-ae86-f8f2844bf80c/7f036972-f3d8-47df-ae86-f8f2844bf80c.vmdk to [datastore2] ead4b4b6-05e9-404e-9127-90b3395e47e7/7f036972-f3d8-47df-ae86-f8f2844bf80c-rescue.vmdk. [ 1103.560916] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43331fa8-8676-4f0c-b9c3-0024d4422275 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.585482] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] ead4b4b6-05e9-404e-9127-90b3395e47e7/7f036972-f3d8-47df-ae86-f8f2844bf80c-rescue.vmdk or device None with type thin {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1103.585925] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e68c8422-2e61-4d65-8d38-266dbc1039bf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.603269] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Waiting for the task: (returnval){ [ 1103.603269] env[61629]: value = "task-1354662" [ 1103.603269] env[61629]: _type = "Task" [ 1103.603269] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.610253] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354662, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.044796] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 3085a70f-360c-43a3-80d7-e7b87fb3e146] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1104.112886] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354662, 'name': ReconfigVM_Task, 'duration_secs': 0.276299} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.113201] env[61629]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Reconfigured VM instance instance-0000006a to attach disk [datastore2] ead4b4b6-05e9-404e-9127-90b3395e47e7/7f036972-f3d8-47df-ae86-f8f2844bf80c-rescue.vmdk or device None with type thin {{(pid=61629) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1104.114032] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f4eed7-facc-4465-acfc-fc2968a0c581 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.138082] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38d889c2-6fce-484c-a80f-899d0c24a75a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.152768] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Waiting for the task: (returnval){ [ 1104.152768] env[61629]: value = "task-1354663" [ 1104.152768] env[61629]: _type = "Task" [ 1104.152768] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.162931] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354663, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.549275] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 7c3e9d0f-88a8-41fe-bf61-e3db34d36928] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1104.662696] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354663, 'name': ReconfigVM_Task, 'duration_secs': 0.157914} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.662933] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Powering on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1104.663199] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fd98aa30-0a19-4292-8b3c-dbad30013308 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.668604] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Waiting for the task: (returnval){ [ 1104.668604] env[61629]: value = "task-1354664" [ 1104.668604] env[61629]: _type = "Task" [ 1104.668604] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.680023] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354664, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.053296] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 7cf87381-235e-449b-8269-61c2d4033028] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1105.178714] env[61629]: DEBUG oslo_vmware.api [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354664, 'name': PowerOnVM_Task, 'duration_secs': 0.402129} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.178990] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Powered on the VM {{(pid=61629) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1105.181849] env[61629]: DEBUG nova.compute.manager [None req-9d917c46-a909-4442-b169-52f403ff5801 tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Checking state {{(pid=61629) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1105.182682] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352dc880-33f0-4717-99f9-903610ce22cb {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.556621] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 274e3437-eacd-4299-9c27-97bbb0ebf1c1] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1106.059922] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: a42d5132-22e5-4551-80d2-fb7a55a7fa9e] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1106.070152] env[61629]: DEBUG nova.compute.manager [req-bda17dff-182f-440d-8259-75355391a896 req-7f3ac1f9-ec5d-49a7-b03e-f4d3d274c8ff service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Received event network-changed-f6f32347-1a23-4769-af62-856db20d9ff7 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1106.070353] env[61629]: DEBUG nova.compute.manager [req-bda17dff-182f-440d-8259-75355391a896 req-7f3ac1f9-ec5d-49a7-b03e-f4d3d274c8ff service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Refreshing instance network info cache due to event network-changed-f6f32347-1a23-4769-af62-856db20d9ff7. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1106.070555] env[61629]: DEBUG oslo_concurrency.lockutils [req-bda17dff-182f-440d-8259-75355391a896 req-7f3ac1f9-ec5d-49a7-b03e-f4d3d274c8ff service nova] Acquiring lock "refresh_cache-ead4b4b6-05e9-404e-9127-90b3395e47e7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1106.070716] env[61629]: DEBUG oslo_concurrency.lockutils [req-bda17dff-182f-440d-8259-75355391a896 req-7f3ac1f9-ec5d-49a7-b03e-f4d3d274c8ff service nova] Acquired lock "refresh_cache-ead4b4b6-05e9-404e-9127-90b3395e47e7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.070905] env[61629]: DEBUG nova.network.neutron [req-bda17dff-182f-440d-8259-75355391a896 req-7f3ac1f9-ec5d-49a7-b03e-f4d3d274c8ff service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Refreshing network info cache for port f6f32347-1a23-4769-af62-856db20d9ff7 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1106.564055] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 109ab664-3bb9-420e-a4a5-526277c60b96] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1106.750677] env[61629]: DEBUG nova.network.neutron [req-bda17dff-182f-440d-8259-75355391a896 req-7f3ac1f9-ec5d-49a7-b03e-f4d3d274c8ff service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Updated VIF entry in instance network info cache for port f6f32347-1a23-4769-af62-856db20d9ff7. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1106.750913] env[61629]: DEBUG nova.network.neutron [req-bda17dff-182f-440d-8259-75355391a896 req-7f3ac1f9-ec5d-49a7-b03e-f4d3d274c8ff service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Updating instance_info_cache with network_info: [{"id": "f6f32347-1a23-4769-af62-856db20d9ff7", "address": "fa:16:3e:45:78:b9", "network": {"id": "6d42a9ec-1547-45b7-81be-bce10dd9eef0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-853038587-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "65277daeb9e34c0b885893cd19d978b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6f32347-1a", "ovs_interfaceid": "f6f32347-1a23-4769-af62-856db20d9ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.068069] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 1d451558-dbbc-4942-b739-5d4b88057a75] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11241}} [ 1107.254344] env[61629]: DEBUG oslo_concurrency.lockutils [req-bda17dff-182f-440d-8259-75355391a896 req-7f3ac1f9-ec5d-49a7-b03e-f4d3d274c8ff service nova] Releasing lock "refresh_cache-ead4b4b6-05e9-404e-9127-90b3395e47e7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1107.572262] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 12c6b03b-8295-43de-898f-a6c35f1693b7] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1107.981257] env[61629]: DEBUG nova.compute.manager [req-933a87af-9d86-4d56-92a2-e550c33860dc req-27b96f61-95d2-44f5-a2c6-ffc1522799bc service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Received event network-changed-f6f32347-1a23-4769-af62-856db20d9ff7 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1107.981465] env[61629]: DEBUG nova.compute.manager [req-933a87af-9d86-4d56-92a2-e550c33860dc req-27b96f61-95d2-44f5-a2c6-ffc1522799bc service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Refreshing instance network info cache due to event network-changed-f6f32347-1a23-4769-af62-856db20d9ff7. {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1107.981685] env[61629]: DEBUG oslo_concurrency.lockutils [req-933a87af-9d86-4d56-92a2-e550c33860dc req-27b96f61-95d2-44f5-a2c6-ffc1522799bc service nova] Acquiring lock "refresh_cache-ead4b4b6-05e9-404e-9127-90b3395e47e7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1107.981830] env[61629]: DEBUG oslo_concurrency.lockutils [req-933a87af-9d86-4d56-92a2-e550c33860dc req-27b96f61-95d2-44f5-a2c6-ffc1522799bc service nova] Acquired lock "refresh_cache-ead4b4b6-05e9-404e-9127-90b3395e47e7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.982019] env[61629]: DEBUG nova.network.neutron [req-933a87af-9d86-4d56-92a2-e550c33860dc req-27b96f61-95d2-44f5-a2c6-ffc1522799bc service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Refreshing network info cache for port f6f32347-1a23-4769-af62-856db20d9ff7 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1108.075418] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 55f2d2fc-9404-422f-ba08-72e6e11a089f] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1108.097080] env[61629]: DEBUG nova.compute.manager [req-14c9a372-2598-4ea5-b186-492640d5696d req-441e1ff0-6e1d-4072-b31a-87c10eacc930 service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Received event network-changed-f6f32347-1a23-4769-af62-856db20d9ff7 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1108.097429] env[61629]: DEBUG nova.compute.manager [req-14c9a372-2598-4ea5-b186-492640d5696d req-441e1ff0-6e1d-4072-b31a-87c10eacc930 service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Refreshing instance network info cache due to event network-changed-f6f32347-1a23-4769-af62-856db20d9ff7. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1108.097583] env[61629]: DEBUG oslo_concurrency.lockutils [req-14c9a372-2598-4ea5-b186-492640d5696d req-441e1ff0-6e1d-4072-b31a-87c10eacc930 service nova] Acquiring lock "refresh_cache-ead4b4b6-05e9-404e-9127-90b3395e47e7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1108.580581] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 0d21b352-bdd0-4887-8658-cd5c448352d2] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1108.679658] env[61629]: DEBUG nova.network.neutron [req-933a87af-9d86-4d56-92a2-e550c33860dc req-27b96f61-95d2-44f5-a2c6-ffc1522799bc service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Updated VIF entry in instance network info cache for port f6f32347-1a23-4769-af62-856db20d9ff7. {{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1108.680037] env[61629]: DEBUG nova.network.neutron [req-933a87af-9d86-4d56-92a2-e550c33860dc req-27b96f61-95d2-44f5-a2c6-ffc1522799bc service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Updating instance_info_cache with network_info: [{"id": "f6f32347-1a23-4769-af62-856db20d9ff7", "address": "fa:16:3e:45:78:b9", "network": {"id": "6d42a9ec-1547-45b7-81be-bce10dd9eef0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-853038587-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "65277daeb9e34c0b885893cd19d978b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6f32347-1a", "ovs_interfaceid": "f6f32347-1a23-4769-af62-856db20d9ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.083492] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 87a1383f-d66b-4bde-b153-89ac62ff8390] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1109.097055] env[61629]: DEBUG oslo_concurrency.lockutils [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Acquiring lock "ead4b4b6-05e9-404e-9127-90b3395e47e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.097327] env[61629]: DEBUG oslo_concurrency.lockutils [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] 
Lock "ead4b4b6-05e9-404e-9127-90b3395e47e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.097556] env[61629]: DEBUG oslo_concurrency.lockutils [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Acquiring lock "ead4b4b6-05e9-404e-9127-90b3395e47e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.097746] env[61629]: DEBUG oslo_concurrency.lockutils [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Lock "ead4b4b6-05e9-404e-9127-90b3395e47e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.097920] env[61629]: DEBUG oslo_concurrency.lockutils [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Lock "ead4b4b6-05e9-404e-9127-90b3395e47e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.099894] env[61629]: INFO nova.compute.manager [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Terminating instance [ 1109.101584] env[61629]: DEBUG nova.compute.manager [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Start destroying the instance on the hypervisor. 
{{(pid=61629) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1109.101780] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Destroying instance {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1109.102609] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55436f46-4d12-4980-ba42-817ea09818ba {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.111280] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Powering off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1109.111497] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5661b98-40a2-47bd-b795-4bfe747d201b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.116983] env[61629]: DEBUG oslo_vmware.api [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Waiting for the task: (returnval){ [ 1109.116983] env[61629]: value = "task-1354665" [ 1109.116983] env[61629]: _type = "Task" [ 1109.116983] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.124886] env[61629]: DEBUG oslo_vmware.api [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354665, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.182514] env[61629]: DEBUG oslo_concurrency.lockutils [req-933a87af-9d86-4d56-92a2-e550c33860dc req-27b96f61-95d2-44f5-a2c6-ffc1522799bc service nova] Releasing lock "refresh_cache-ead4b4b6-05e9-404e-9127-90b3395e47e7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1109.182892] env[61629]: DEBUG oslo_concurrency.lockutils [req-14c9a372-2598-4ea5-b186-492640d5696d req-441e1ff0-6e1d-4072-b31a-87c10eacc930 service nova] Acquired lock "refresh_cache-ead4b4b6-05e9-404e-9127-90b3395e47e7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.183107] env[61629]: DEBUG nova.network.neutron [req-14c9a372-2598-4ea5-b186-492640d5696d req-441e1ff0-6e1d-4072-b31a-87c10eacc930 service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Refreshing network info cache for port f6f32347-1a23-4769-af62-856db20d9ff7 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1109.586390] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: da1eb7f9-7562-40c8-955b-c11f831b7bc8] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1109.627042] env[61629]: DEBUG oslo_vmware.api [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354665, 'name': PowerOffVM_Task, 'duration_secs': 0.208589} completed successfully. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.627328] env[61629]: DEBUG nova.virt.vmwareapi.vm_util [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Powered off the VM {{(pid=61629) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1109.627506] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Unregistering the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1109.627780] env[61629]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e24f57f5-43ab-42ae-a9f4-69500c1c7966 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.701201] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Unregistered the VM {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1109.701425] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Deleting contents of the VM from datastore datastore2 {{(pid=61629) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1109.701624] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Deleting the datastore file [datastore2] ead4b4b6-05e9-404e-9127-90b3395e47e7 {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1109.701920] env[61629]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5922b43d-7e73-40be-8794-cf29cee1c20a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.708442] env[61629]: DEBUG oslo_vmware.api [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Waiting for the task: (returnval){ [ 1109.708442] env[61629]: value = "task-1354667" [ 1109.708442] env[61629]: _type = "Task" [ 1109.708442] env[61629]: } to complete. {{(pid=61629) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.716381] env[61629]: DEBUG oslo_vmware.api [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354667, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.910255] env[61629]: DEBUG nova.network.neutron [req-14c9a372-2598-4ea5-b186-492640d5696d req-441e1ff0-6e1d-4072-b31a-87c10eacc930 service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Updated VIF entry in instance network info cache for port f6f32347-1a23-4769-af62-856db20d9ff7. 
{{(pid=61629) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1109.910676] env[61629]: DEBUG nova.network.neutron [req-14c9a372-2598-4ea5-b186-492640d5696d req-441e1ff0-6e1d-4072-b31a-87c10eacc930 service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Updating instance_info_cache with network_info: [{"id": "f6f32347-1a23-4769-af62-856db20d9ff7", "address": "fa:16:3e:45:78:b9", "network": {"id": "6d42a9ec-1547-45b7-81be-bce10dd9eef0", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-853038587-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "65277daeb9e34c0b885893cd19d978b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "816c6e38-e200-4544-8c5b-9fc3e16c5761", "external-id": "nsx-vlan-transportzone-195", "segmentation_id": 195, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6f32347-1a", "ovs_interfaceid": "f6f32347-1a23-4769-af62-856db20d9ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.089898] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: d37958f8-7607-418b-9cfd-c3a5df721e94] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1110.218406] env[61629]: DEBUG oslo_vmware.api [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Task: {'id': task-1354667, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17277} completed successfully. 
{{(pid=61629) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.218677] env[61629]: DEBUG nova.virt.vmwareapi.ds_util [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Deleted the datastore file {{(pid=61629) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1110.218866] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Deleted contents of the VM from datastore datastore2 {{(pid=61629) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1110.219089] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Instance destroyed {{(pid=61629) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1110.219244] env[61629]: INFO nova.compute.manager [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1110.219577] env[61629]: DEBUG oslo.service.loopingcall [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61629) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1110.219723] env[61629]: DEBUG nova.compute.manager [-] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Deallocating network for instance {{(pid=61629) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1110.219816] env[61629]: DEBUG nova.network.neutron [-] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] deallocate_for_instance() {{(pid=61629) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1110.413646] env[61629]: DEBUG oslo_concurrency.lockutils [req-14c9a372-2598-4ea5-b186-492640d5696d req-441e1ff0-6e1d-4072-b31a-87c10eacc930 service nova] Releasing lock "refresh_cache-ead4b4b6-05e9-404e-9127-90b3395e47e7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1110.413918] env[61629]: DEBUG nova.compute.manager [req-14c9a372-2598-4ea5-b186-492640d5696d req-441e1ff0-6e1d-4072-b31a-87c10eacc930 service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Received event network-changed-f6f32347-1a23-4769-af62-856db20d9ff7 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1110.414113] env[61629]: DEBUG nova.compute.manager [req-14c9a372-2598-4ea5-b186-492640d5696d req-441e1ff0-6e1d-4072-b31a-87c10eacc930 service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Refreshing instance network info cache due to event network-changed-f6f32347-1a23-4769-af62-856db20d9ff7. 
{{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1110.414332] env[61629]: DEBUG oslo_concurrency.lockutils [req-14c9a372-2598-4ea5-b186-492640d5696d req-441e1ff0-6e1d-4072-b31a-87c10eacc930 service nova] Acquiring lock "refresh_cache-ead4b4b6-05e9-404e-9127-90b3395e47e7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1110.414479] env[61629]: DEBUG oslo_concurrency.lockutils [req-14c9a372-2598-4ea5-b186-492640d5696d req-441e1ff0-6e1d-4072-b31a-87c10eacc930 service nova] Acquired lock "refresh_cache-ead4b4b6-05e9-404e-9127-90b3395e47e7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.414643] env[61629]: DEBUG nova.network.neutron [req-14c9a372-2598-4ea5-b186-492640d5696d req-441e1ff0-6e1d-4072-b31a-87c10eacc930 service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Refreshing network info cache for port f6f32347-1a23-4769-af62-856db20d9ff7 {{(pid=61629) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1110.429721] env[61629]: DEBUG nova.compute.manager [req-5bb4d70c-9a2d-4ff6-8bba-fdefabad3d37 req-f7af1e62-a200-4b1e-8827-25a8f15e9203 service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Received event network-vif-deleted-f6f32347-1a23-4769-af62-856db20d9ff7 {{(pid=61629) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1110.429933] env[61629]: INFO nova.compute.manager [req-5bb4d70c-9a2d-4ff6-8bba-fdefabad3d37 req-f7af1e62-a200-4b1e-8827-25a8f15e9203 service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Neutron deleted interface f6f32347-1a23-4769-af62-856db20d9ff7; detaching it from the instance and deleting it from the info cache [ 1110.430118] env[61629]: DEBUG nova.network.neutron [req-5bb4d70c-9a2d-4ff6-8bba-fdefabad3d37 req-f7af1e62-a200-4b1e-8827-25a8f15e9203 service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.593850] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 9c340ca1-75e0-4d65-8aae-0d5e11ff3e66] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1110.906451] env[61629]: DEBUG nova.network.neutron [-] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.931325] env[61629]: INFO nova.network.neutron [req-14c9a372-2598-4ea5-b186-492640d5696d req-441e1ff0-6e1d-4072-b31a-87c10eacc930 service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Port f6f32347-1a23-4769-af62-856db20d9ff7 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1110.931559] env[61629]: DEBUG nova.network.neutron [req-14c9a372-2598-4ea5-b186-492640d5696d req-441e1ff0-6e1d-4072-b31a-87c10eacc930 service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Updating instance_info_cache with network_info: [] {{(pid=61629) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.934057] env[61629]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5861cbba-ec12-4099-9d53-d52bf36da293 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.944287] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a961456-7910-4593-b9ab-fc3a8b1dcc49 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.965484] env[61629]: DEBUG nova.compute.manager [req-5bb4d70c-9a2d-4ff6-8bba-fdefabad3d37 req-f7af1e62-a200-4b1e-8827-25a8f15e9203 service nova] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Detach interface failed, port_id=f6f32347-1a23-4769-af62-856db20d9ff7, reason: Instance ead4b4b6-05e9-404e-9127-90b3395e47e7 could not be found. {{(pid=61629) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1111.096705] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 09890839-b1d9-4558-992d-b1a6f4c5f750] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1111.409430] env[61629]: INFO nova.compute.manager [-] [instance: ead4b4b6-05e9-404e-9127-90b3395e47e7] Took 1.19 seconds to deallocate network for instance. [ 1111.434011] env[61629]: DEBUG oslo_concurrency.lockutils [req-14c9a372-2598-4ea5-b186-492640d5696d req-441e1ff0-6e1d-4072-b31a-87c10eacc930 service nova] Releasing lock "refresh_cache-ead4b4b6-05e9-404e-9127-90b3395e47e7" {{(pid=61629) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1111.600477] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 08cb71f4-2ebe-4694-856c-2e772f319cdf] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1111.915722] env[61629]: DEBUG oslo_concurrency.lockutils [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.916624] env[61629]: DEBUG oslo_concurrency.lockutils [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.916624] env[61629]: DEBUG nova.objects.instance [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Lazy-loading 'resources' on Instance uuid ead4b4b6-05e9-404e-9127-90b3395e47e7 
{{(pid=61629) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1112.104174] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 68c1e93a-2829-4764-a900-75c3479b4715] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1112.449732] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdafebcb-bd82-4a58-919c-9c022363f9a1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.457376] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-810fb498-71fa-49fa-84d1-d1bdd39238bf {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.487342] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd963de-22d7-4dbb-b258-64c6271bfab1 {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.494521] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3532ae6-1ff5-4af7-8ba0-ab7c68e7dd3a {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.507183] env[61629]: DEBUG nova.compute.provider_tree [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Inventory has not changed in ProviderTree for provider: d075eff1-6f77-44a8-824e-16f3e03b4063 {{(pid=61629) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1112.607203] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: 2b01eeae-64be-44b3-b4cf-c2a8490043e3] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1113.009922] env[61629]: DEBUG nova.scheduler.client.report [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Inventory has not changed for provider d075eff1-6f77-44a8-824e-16f3e03b4063 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 151, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61629) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1113.110505] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: c3f830d6-8999-49d5-a431-b09dfdaf8313] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1113.514667] env[61629]: DEBUG oslo_concurrency.lockutils [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.598s {{(pid=61629) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1113.532919] env[61629]: INFO nova.scheduler.client.report [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Deleted allocations for instance ead4b4b6-05e9-404e-9127-90b3395e47e7 [ 1113.613317] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: edb4e0f6-57ad-48cf-aa20-3b2549bff3fe] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1114.040419] env[61629]: DEBUG oslo_concurrency.lockutils [None req-98789df4-0cc5-4b7b-b073-23393ed692ba tempest-ServerRescueTestJSONUnderV235-1779724460 tempest-ServerRescueTestJSONUnderV235-1779724460-project-member] Lock "ead4b4b6-05e9-404e-9127-90b3395e47e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.943s {{(pid=61629) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1114.117024] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: fd6961e1-e9d9-492e-bc7e-b5bf2d7610e4] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1114.621796] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: dce0c7e1-1e47-49ad-88f7-f8f5e293d239] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1115.125843] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] [instance: b8cfaef2-5f78-4026-90b8-fe2adacd61e0] Instance has had 0 of 5 cleanup attempts {{(pid=61629) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1115.630160] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1115.630541] env[61629]: DEBUG nova.compute.manager [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Cleaning up deleted instances with incomplete migration {{(pid=61629) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 1116.132909] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1137.212274] env[61629]: DEBUG oslo_service.periodic_task [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Running periodic task ComputeManager._sync_power_states {{(pid=61629) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1137.715194] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Getting list of instances from cluster (obj){ [ 1137.715194] env[61629]: value = "domain-c8" [ 1137.715194] env[61629]: _type = "ClusterComputeResource" [ 1137.715194] env[61629]: } {{(pid=61629) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 
1137.716323] env[61629]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31cba026-c452-4525-b914-318fc144764b {{(pid=61629) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.724955] env[61629]: DEBUG nova.virt.vmwareapi.vmops [None req-add8ca2b-07e4-40a9-9985-250dd20bae3a None None] Got total of 0 instances {{(pid=61629) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}}